
Add bitbake master 7c849be

Baurzhan Ismagulov, 9 years ago
commit ff22b2b1fa
100 changed files with 29457 additions and 0 deletions
  1. bitbake/.gitignore (+17, -0)
  2. bitbake/AUTHORS (+10, -0)
  3. bitbake/COPYING (+339, -0)
  4. bitbake/ChangeLog (+317, -0)
  5. bitbake/HEADER (+19, -0)
  6. bitbake/LICENSE (+10, -0)
  7. bitbake/MANIFEST.in (+11, -0)
  8. bitbake/TODO (+62, -0)
  9. bitbake/bin/bitbake (+53, -0)
 10. bitbake/bin/bitbake-diffsigs (+138, -0)
 11. bitbake/bin/bitbake-dumpsig (+65, -0)
 12. bitbake/bin/bitbake-layers (+1072, -0)
 13. bitbake/bin/bitbake-prserv (+55, -0)
 14. bitbake/bin/bitbake-selftest (+55, -0)
 15. bitbake/bin/bitbake-worker (+447, -0)
 16. bitbake/bin/bitdoc (+531, -0)
 17. bitbake/bin/image-writer (+122, -0)
 18. bitbake/bin/toaster (+332, -0)
 19. bitbake/bin/toaster-eventreplay (+174, -0)
 20. bitbake/classes/base.bbclass (+67, -0)
 21. bitbake/conf/bitbake.conf (+50, -0)
 22. bitbake/contrib/README (+1, -0)
 23. bitbake/contrib/bbdev.sh (+31, -0)
 24. bitbake/contrib/dump_cache.py (+68, -0)
 25. bitbake/contrib/vim/ftdetect/bitbake.vim (+24, -0)
 26. bitbake/contrib/vim/ftplugin/bitbake.vim (+2, -0)
 27. bitbake/contrib/vim/plugin/newbb.vim (+84, -0)
 28. bitbake/contrib/vim/syntax/bitbake.vim (+126, -0)
 29. bitbake/doc/COPYING.GPL (+339, -0)
 30. bitbake/doc/COPYING.MIT (+17, -0)
 31. bitbake/doc/Makefile (+91, -0)
 32. bitbake/doc/README (+39, -0)
 33. bitbake/doc/bitbake-user-manual/bitbake-user-manual-customization.xsl (+29, -0)
 34. bitbake/doc/bitbake-user-manual/bitbake-user-manual-execution.xml (+931, -0)
 35. bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.xml (+765, -0)
 36. bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.xml (+505, -0)
 37. bitbake/doc/bitbake-user-manual/bitbake-user-manual-intro.xml (+685, -0)
 38. bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.xml (+1852, -0)
 39. bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.xml (+2319, -0)
 40. bitbake/doc/bitbake-user-manual/bitbake-user-manual-style.css (+984, -0)
 41. bitbake/doc/bitbake-user-manual/bitbake-user-manual.xml (+88, -0)
 42. bitbake/doc/bitbake-user-manual/figures/bitbake-title.png (binary)
 43. bitbake/doc/bitbake-user-manual/html.css (+281, -0)
 44. bitbake/doc/bitbake.1 (+142, -0)
 45. bitbake/doc/poky.ent (+59, -0)
 46. bitbake/doc/template/Vera.ttf (binary)
 47. bitbake/doc/template/Vera.xml (+0, -0)
 48. bitbake/doc/template/VeraMoBd.ttf (binary)
 49. bitbake/doc/template/VeraMoBd.xml (+0, -0)
 50. bitbake/doc/template/VeraMono.ttf (binary)
 51. bitbake/doc/template/VeraMono.xml (+0, -0)
 52. bitbake/doc/template/component.title.xsl (+39, -0)
 53. bitbake/doc/template/db-pdf.xsl (+64, -0)
 54. bitbake/doc/template/division.title.xsl (+25, -0)
 55. bitbake/doc/template/draft.png (binary)
 56. bitbake/doc/template/fop-config.xml (+58, -0)
 57. bitbake/doc/template/formal.object.heading.xsl (+21, -0)
 58. bitbake/doc/template/gloss-permalinks.xsl (+14, -0)
 59. bitbake/doc/template/permalinks.xsl (+25, -0)
 60. bitbake/doc/template/section.title.xsl (+55, -0)
 61. bitbake/doc/template/titlepage.templates.xml (+1259, -0)
 62. bitbake/doc/tools/docbook-to-pdf (+51, -0)
 63. bitbake/lib/bb/COW.py (+323, -0)
 64. bitbake/lib/bb/__init__.py (+144, -0)
 65. bitbake/lib/bb/build.py (+777, -0)
 66. bitbake/lib/bb/cache.py (+849, -0)
 67. bitbake/lib/bb/cache_extra.py (+75, -0)
 68. bitbake/lib/bb/checksum.py (+137, -0)
 69. bitbake/lib/bb/codeparser.py (+436, -0)
 70. bitbake/lib/bb/command.py (+474, -0)
 71. bitbake/lib/bb/compat.py (+6, -0)
 72. bitbake/lib/bb/cooker.py (+2188, -0)
 73. bitbake/lib/bb/cookerdata.py (+341, -0)
 74. bitbake/lib/bb/daemonize.py (+193, -0)
 75. bitbake/lib/bb/data.py (+448, -0)
 76. bitbake/lib/bb/data_smart.py (+969, -0)
 77. bitbake/lib/bb/event.py (+679, -0)
 78. bitbake/lib/bb/exceptions.py (+91, -0)
 79. bitbake/lib/bb/fetch2/__init__.py (+1751, -0)
 80. bitbake/lib/bb/fetch2/bzr.py (+143, -0)
 81. bitbake/lib/bb/fetch2/clearcase.py (+263, -0)
 82. bitbake/lib/bb/fetch2/cvs.py (+171, -0)
 83. bitbake/lib/bb/fetch2/git.py (+451, -0)
 84. bitbake/lib/bb/fetch2/gitannex.py (+76, -0)
 85. bitbake/lib/bb/fetch2/gitsm.py (+137, -0)
 86. bitbake/lib/bb/fetch2/hg.py (+278, -0)
 87. bitbake/lib/bb/fetch2/local.py (+129, -0)
 88. bitbake/lib/bb/fetch2/npm.py (+284, -0)
 89. bitbake/lib/bb/fetch2/osc.py (+135, -0)
 90. bitbake/lib/bb/fetch2/perforce.py (+187, -0)
 91. bitbake/lib/bb/fetch2/repo.py (+98, -0)
 92. bitbake/lib/bb/fetch2/sftp.py (+129, -0)
 93. bitbake/lib/bb/fetch2/ssh.py (+128, -0)
 94. bitbake/lib/bb/fetch2/svn.py (+197, -0)
 95. bitbake/lib/bb/fetch2/wget.py (+555, -0)
 96. bitbake/lib/bb/main.py (+439, -0)
 97. bitbake/lib/bb/methodpool.py (+40, -0)
 98. bitbake/lib/bb/monitordisk.py (+263, -0)
 99. bitbake/lib/bb/msg.py (+199, -0)
100. bitbake/lib/bb/namedtuple_with_abc.py (+255, -0)

+ 17 - 0
bitbake/.gitignore

@@ -0,0 +1,17 @@
+*.pyc
+*.pyo
+*~
+pyshtables.py
+/doc/manual/html/
+/build/
+/bin/bitbakec
+*.swp
+tags
+*.sqlite
+venv/
+doc/bitbake-user-manual/bitbake-user-manual.html
+doc/bitbake-user-manual/bitbake-user-manual.pdf
+doc/bitbake-user-manual/bitbake-user-manual.tgz
+lib/toaster/contrib/tts/backlog.txt
+lib/toaster/contrib/tts/log/*
+lib/toaster/contrib/tts/.cache/*

+ 10 - 0
bitbake/AUTHORS

@@ -0,0 +1,10 @@
+Tim Ansell <mithro@mithis.net>
+Phil Blundell <pb@handhelds.org>
+Seb Frankengul <seb@frankengul.org>
+Holger Freyther <holger@moiji-mobile.com>
+Marcin Juszkiewicz <marcin@juszkiewicz.com.pl>
+Chris Larson <kergoth@handhelds.org>
+Ulrich Luckas <luckas@musoft.de>
+Mickey Lauer <mickey@Vanille.de>
+Richard Purdie <rpurdie@rpsys.net>
+Holger Schurig <holgerschurig@gmx.de>

+ 339 - 0
bitbake/COPYING

@@ -0,0 +1,339 @@
+		    GNU GENERAL PUBLIC LICENSE
+		       Version 2, June 1991
+
+ Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+			    Preamble
+
+  The licenses for most software are designed to take away your
+freedom to share and change it.  By contrast, the GNU General Public
+License is intended to guarantee your freedom to share and change free
+software--to make sure the software is free for all its users.  This
+General Public License applies to most of the Free Software
+Foundation's software and to any other program whose authors commit to
+using it.  (Some other Free Software Foundation software is covered by
+the GNU Lesser General Public License instead.)  You can apply it to
+your programs, too.
+
+  When we speak of free software, we are referring to freedom, not
+price.  Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+this service if you wish), that you receive source code or can get it
+if you want it, that you can change the software or use pieces of it
+in new free programs; and that you know you can do these things.
+
+  To protect your rights, we need to make restrictions that forbid
+anyone to deny you these rights or to ask you to surrender the rights.
+These restrictions translate to certain responsibilities for you if you
+distribute copies of the software, or if you modify it.
+
+  For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must give the recipients all the rights that
+you have.  You must make sure that they, too, receive or can get the
+source code.  And you must show them these terms so they know their
+rights.
+
+  We protect your rights with two steps: (1) copyright the software, and
+(2) offer you this license which gives you legal permission to copy,
+distribute and/or modify the software.
+
+  Also, for each author's protection and ours, we want to make certain
+that everyone understands that there is no warranty for this free
+software.  If the software is modified by someone else and passed on, we
+want its recipients to know that what they have is not the original, so
+that any problems introduced by others will not reflect on the original
+authors' reputations.
+
+  Finally, any free program is threatened constantly by software
+patents.  We wish to avoid the danger that redistributors of a free
+program will individually obtain patent licenses, in effect making the
+program proprietary.  To prevent this, we have made it clear that any
+patent must be licensed for everyone's free use or not licensed at all.
+
+  The precise terms and conditions for copying, distribution and
+modification follow.
+
+		    GNU GENERAL PUBLIC LICENSE
+   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+  0. This License applies to any program or other work which contains
+a notice placed by the copyright holder saying it may be distributed
+under the terms of this General Public License.  The "Program", below,
+refers to any such program or work, and a "work based on the Program"
+means either the Program or any derivative work under copyright law:
+that is to say, a work containing the Program or a portion of it,
+either verbatim or with modifications and/or translated into another
+language.  (Hereinafter, translation is included without limitation in
+the term "modification".)  Each licensee is addressed as "you".
+
+Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope.  The act of
+running the Program is not restricted, and the output from the Program
+is covered only if its contents constitute a work based on the
+Program (independent of having been made by running the Program).
+Whether that is true depends on what the Program does.
+
+  1. You may copy and distribute verbatim copies of the Program's
+source code as you receive it, in any medium, provided that you
+conspicuously and appropriately publish on each copy an appropriate
+copyright notice and disclaimer of warranty; keep intact all the
+notices that refer to this License and to the absence of any warranty;
+and give any other recipients of the Program a copy of this License
+along with the Program.
+
+You may charge a fee for the physical act of transferring a copy, and
+you may at your option offer warranty protection in exchange for a fee.
+
+  2. You may modify your copy or copies of the Program or any portion
+of it, thus forming a work based on the Program, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+    a) You must cause the modified files to carry prominent notices
+    stating that you changed the files and the date of any change.
+
+    b) You must cause any work that you distribute or publish, that in
+    whole or in part contains or is derived from the Program or any
+    part thereof, to be licensed as a whole at no charge to all third
+    parties under the terms of this License.
+
+    c) If the modified program normally reads commands interactively
+    when run, you must cause it, when started running for such
+    interactive use in the most ordinary way, to print or display an
+    announcement including an appropriate copyright notice and a
+    notice that there is no warranty (or else, saying that you provide
+    a warranty) and that users may redistribute the program under
+    these conditions, and telling the user how to view a copy of this
+    License.  (Exception: if the Program itself is interactive but
+    does not normally print such an announcement, your work based on
+    the Program is not required to print an announcement.)
+
+These requirements apply to the modified work as a whole.  If
+identifiable sections of that work are not derived from the Program,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works.  But when you
+distribute the same sections as part of a whole which is a work based
+on the Program, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Program.
+
+In addition, mere aggregation of another work not based on the Program
+with the Program (or with a work based on the Program) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+  3. You may copy and distribute the Program (or a work based on it,
+under Section 2) in object code or executable form under the terms of
+Sections 1 and 2 above provided that you also do one of the following:
+
+    a) Accompany it with the complete corresponding machine-readable
+    source code, which must be distributed under the terms of Sections
+    1 and 2 above on a medium customarily used for software interchange; or,
+
+    b) Accompany it with a written offer, valid for at least three
+    years, to give any third party, for a charge no more than your
+    cost of physically performing source distribution, a complete
+    machine-readable copy of the corresponding source code, to be
+    distributed under the terms of Sections 1 and 2 above on a medium
+    customarily used for software interchange; or,
+
+    c) Accompany it with the information you received as to the offer
+    to distribute corresponding source code.  (This alternative is
+    allowed only for noncommercial distribution and only if you
+    received the program in object code or executable form with such
+    an offer, in accord with Subsection b above.)
+
+The source code for a work means the preferred form of the work for
+making modifications to it.  For an executable work, complete source
+code means all the source code for all modules it contains, plus any
+associated interface definition files, plus the scripts used to
+control compilation and installation of the executable.  However, as a
+special exception, the source code distributed need not include
+anything that is normally distributed (in either source or binary
+form) with the major components (compiler, kernel, and so on) of the
+operating system on which the executable runs, unless that component
+itself accompanies the executable.
+
+If distribution of executable or object code is made by offering
+access to copy from a designated place, then offering equivalent
+access to copy the source code from the same place counts as
+distribution of the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+  4. You may not copy, modify, sublicense, or distribute the Program
+except as expressly provided under this License.  Any attempt
+otherwise to copy, modify, sublicense or distribute the Program is
+void, and will automatically terminate your rights under this License.
+However, parties who have received copies, or rights, from you under
+this License will not have their licenses terminated so long as such
+parties remain in full compliance.
+
+  5. You are not required to accept this License, since you have not
+signed it.  However, nothing else grants you permission to modify or
+distribute the Program or its derivative works.  These actions are
+prohibited by law if you do not accept this License.  Therefore, by
+modifying or distributing the Program (or any work based on the
+Program), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Program or works based on it.
+
+  6. Each time you redistribute the Program (or any work based on the
+Program), the recipient automatically receives a license from the
+original licensor to copy, distribute or modify the Program subject to
+these terms and conditions.  You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties to
+this License.
+
+  7. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License.  If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Program at all.  For example, if a patent
+license would not permit royalty-free redistribution of the Program by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Program.
+
+If any portion of this section is held invalid or unenforceable under
+any particular circumstance, the balance of the section is intended to
+apply and the section as a whole is intended to apply in other
+circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system, which is
+implemented by public license practices.  Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+  8. If the distribution and/or use of the Program is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Program under this License
+may add an explicit geographical distribution limitation excluding
+those countries, so that distribution is permitted only in or among
+countries not thus excluded.  In such case, this License incorporates
+the limitation as if written in the body of this License.
+
+  9. The Free Software Foundation may publish revised and/or new versions
+of the General Public License from time to time.  Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+Each version is given a distinguishing version number.  If the Program
+specifies a version number of this License which applies to it and "any
+later version", you have the option of following the terms and conditions
+either of that version or of any later version published by the Free
+Software Foundation.  If the Program does not specify a version number of
+this License, you may choose any version ever published by the Free Software
+Foundation.
+
+  10. If you wish to incorporate parts of the Program into other free
+programs whose distribution conditions are different, write to the author
+to ask for permission.  For software which is copyrighted by the Free
+Software Foundation, write to the Free Software Foundation; we sometimes
+make exceptions for this.  Our decision will be guided by the two goals
+of preserving the free status of all derivatives of our free software and
+of promoting the sharing and reuse of software generally.
+
+			    NO WARRANTY
+
+  11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
+FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW.  EXCEPT WHEN
+OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
+PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
+OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.  THE ENTIRE RISK AS
+TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU.  SHOULD THE
+PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
+REPAIR OR CORRECTION.
+
+  12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
+REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
+INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
+OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
+TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
+YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
+PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGES.
+
+		     END OF TERMS AND CONDITIONS
+
+	    How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License along
+    with this program; if not, write to the Free Software Foundation, Inc.,
+    51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this
+when it starts in an interactive mode:
+
+    Gnomovision version 69, Copyright (C) year name of author
+    Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, the commands you use may
+be called something other than `show w' and `show c'; they could even be
+mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the program, if
+necessary.  Here is a sample; alter the names:
+
+  Yoyodyne, Inc., hereby disclaims all copyright interest in the program
+  `Gnomovision' (which makes passes at compilers) written by James Hacker.
+
+  <signature of Ty Coon>, 1 April 1989
+  Ty Coon, President of Vice
+
+This General Public License does not permit incorporating your program into
+proprietary programs.  If your program is a subroutine library, you may
+consider it more useful to permit linking proprietary applications with the
+library.  If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.

+ 317 - 0
bitbake/ChangeLog

@@ -0,0 +1,317 @@
+Changes in Bitbake 1.9.x:
+	- Add PE (Package Epoch) support from Philipp Zabel (pH5)
+	- Treat python functions the same as shell functions for logging
+	- Use TMPDIR/anonfunc as a __anonfunc temp directory (T)
+	- Catch truncated cache file errors
+	- Allow operations other than assignment on flag variables
+	- Add code to handle inter-task dependencies
+	- Fix cache errors when generating dotGraphs
+	- Make sure __inherit_cache is updated before calling include() (from Michael Krelin)
+	- Fix bug when target was in ASSUME_PROVIDED (#2236)
+	- Raise ParseError for filenames with multiple underscores instead of infinitely looping (#2062)
+	- Fix invalid regexp in BBMASK error handling (missing import) (#1124)
+	- Promote certain warnings from debug to note 2 level
+	- Update manual
+	- Correctly redirect stdin when forking
+	- If parsing errors are found, exit, too many users miss the errors
+	- Remove spurious PREFERRED_PROVIDER warnings
+	- svn fetcher: Add _buildsvncommand function
+	- Improve certain error messages
+	- Rewrite svn fetcher to make adding extra operations easier 
+	  as part of future SRCDATE="now" fixes
+	  (requires new FETCHCMD_svn definition in bitbake.conf)
+	- Change SVNDIR layout to be more unique (fixes #2644 and #2624)
+	- Add ConfigParsed Event after configuration parsing is complete
+	- Add SRCREV support for svn fetcher
+	- data.emit_var() - only call getVar if we need the variable
+	- Stop generating the A variable (seems to be legacy code)
+	- Make sure intertask depends get processed correctly in recursive depends
+	- Add pn-PN to overrides when evaluating PREFERRED_VERSION
+	- Improve the progress indicator by skipping tasks that have 
+	  already run before starting the build rather than during it
+	- Add profiling option (-P)
+	- Add BB_SRCREV_POLICY variable (clear or cache) to control SRCREV cache
+	- Add SRCREV_FORMAT support
+	- Fix local fetcher's localpath return values
+	- Apply OVERRIDES before performing immediate expansions
+	- Allow the -b -e option combination to take regular expressions
+	- Fix handling of variables with expansion in the name using _append/_prepend
+	  e.g. RRECOMMENDS_${PN}_append_xyz = "abc"
+	- Add plain message function to bb.msg
+	- Sort the list of providers before processing so dependency problems are 
+	  reproducible rather than effectively random
+	- Fix/improve bitbake -s output
+	- Add locking for fetchers so only one tries to fetch a given file at a given time
+	- Fix int(0)/None confusion in runqueue.py which causes random gaps in dependency chains	  
+	- Expand data in addtasks
+	- Print the list of missing DEPENDS,RDEPENDS for the "No buildable providers available for required...."
+	  error message.
+	- Rework add_task to be more efficient (6% speedup, 7% number of function calls reduction)
+	- Sort digraph output to make builds more reproducible
+	- Split expandKeys into two for loops to benefit from the expand_cache (12% speedup)
+	- runqueue.py: Fix idepends handling to avoid dependency errors
+	- Clear the terminal TOSTOP flag if set (and warn the user)
+	- Fix regression from r653 and make SRCDATE/CVSDATE work for packages again
+	- Fix a bug in bb.decodeurl where http://some.where.com/somefile.tgz decoded to host="" (#1530)
+	- Warn about malformed PREFERRED_PROVIDERS (#1072)
+	- Add support for BB_NICE_LEVEL option (#1627)
+	- Psyco is used only on x86 as there is no support for other architectures.
+	- Sort initial providers list by default preference (#1145, #2024)
+	- Improve provider sorting so preferred versions have preference over latest versions (#768)
+	- Detect builds of tasks with overlapping providers and warn (will become a fatal error) (#1359)
+	- Add MULTI_PROVIDER_WHITELIST variable to allow known safe multiple providers to be listed
+	- Handle paths in svn fetcher module parameter
+	- Support the syntax "export VARIABLE"
+	- Add bzr fetcher
+	- Add support for cleaning directories before a task in the form:
+	  do_taskname[cleandirs] = "dir"
+	- bzr fetcher tweaks from Robert Schuster (#2913)
+	- Add mercurial (hg) fetcher from Robert Schuster (#2913)
+	- Don't add duplicates to BBPATH
+	- Fix preferred_version return values (providers.py)
+	- Fix 'depends' flag splitting
+	- Fix unexport handling (#3135)
+	- Add bb.copyfile function similar to bb.movefile (and improve movefile error reporting)
+	- Allow multiple options for deptask flag
+	- Use git-fetch instead of git-pull removing any need for merges when 
+	  fetching (we don't care about the index). Fixes fetch errors.
+	- Add BB_GENERATE_MIRROR_TARBALLS option, set to 0 to make git fetches 
+	  faster at the expense of not creating mirror tarballs.
+	- SRCREV handling updates, improvements and fixes from Poky
+	- Add bb.utils.lockfile() and bb.utils.unlockfile() from Poky
+	- Add support for task selfstamp and lockfiles flags
+	- Disable task number acceleration since it can allow the tasks to run 
+	  out of sequence
+	- Improve runqueue code comments
+	- Add task scheduler abstraction and some example schedulers
+	- Improve circular dependency chain debugging code and user feedback
+	- Don't give a stacktrace for invalid tasks, have a user friendly message (#3431)
+	- Add support for "-e target" (#3432)
+	- Fix shell showdata command (#3259)
+	- Fix shell data updating problems (#1880)
+	- Properly raise errors for invalid source URI protocols
+	- Change the wget fetcher failure handling to avoid lockfile problems
+	- Add support for branches in git fetcher (Otavio Salvador, Michael Lauer)
+	- Make taskdata and runqueue errors more user friendly
+	- Add norecurse and fullpath options to cvs fetcher
+	- Fix exit code for build failures in --continue mode
+	- Fix git branch tags fetching
+	- Change parseConfigurationFile so it works on real data, not a copy
+	- Handle 'base' inherit and all other INHERITs from parseConfigurationFile 
+	  instead of BBHandler
+	- Fix getVarFlags bug in data_smart
+	- Optimise cache handling by more quickly detecting an invalid cache, only
+	  saving the cache when it's changed, moving the cache validity check into
+	  the parsing loop and factoring some getVar calls outside a for loop
+	- Cooker: Remove a debug message from the parsing loop to lower overhead
+	- Convert build.py exec_task to use getVarFlags
+	- Update shell to use cooker.buildFile
+	- Add StampUpdate event
+	- Convert -b option to use taskdata/runqueue
+	- Remove digraph and switch to new stamp checking code. exec_task no longer
+	  honours dependencies
+	- Make fetcher timestamp updating non-fatal when permissions don't allow 
+	  updates
+	- Add BB_SCHEDULER variable/option ("completion" or "speed") controlling
+	  the way bitbake schedules tasks
+	- Add BB_STAMP_POLICY variable/option ("perfile" or "full") controlling
+	  how extensively stamps are looked at for validity
+	- When handling build target failures make sure idepends are checked and
+	  failed where needed. Fixes --continue mode crashes.
+	- Fix -f (force) in conjunction with -b
+	- Fix problems with recrdeptask handling where some idepends weren't handled
+	  correctly.
+	- Handle exit codes correctly (from pH5)
+	- Work around refs/HEAD issues with git over http (#3410)
+	- Add proxy support to the CVS fetcher (from Cyril Chemparathy)
+	- Improve runfetchcmd so errors are seen and various GIT variables are exported
+	- Add ability to fetchers to check URL validity without downloading
+	- Improve runtime PREFERRED_PROVIDERS warning message
+	- Add BB_STAMP_WHITELIST option which contains a list of stamps to ignore when
+	  checking stamp dependencies and using a BB_STAMP_POLICY of "whitelist"
+	- No longer weight providers on the basis of a package being "already staged". This
+	  leads to builds being non-deterministic.
+	- Flush stdout/stderr before forking to fix duplicate console output
+	- Make sure recrdeps tasks include all inter-task dependencies of a given fn
+	- Add bb.runqueue.check_stamp_fn() for use by packaged-staging
+	- Add PERSISTENT_DIR to store the PersistData in a persistent
+	  directory != the cache dir.
+	- Add md5 and sha256 checksum generation functions to utils.py
+	- Correctly handle '-' characters in class names (#2958)
+	- Make sure expandKeys has been called on the data dictionary before running tasks
+	- Correctly add a task override in the form task-TASKNAME.
+	- Revert the '-' character fix in class names since it breaks things
+	- When a regexp fails to compile for PACKAGES_DYNAMIC, print a more useful error (#4444)
+	- Allow checking out from CVS by date and time. Just add HHmm to the SRCDATE.
+	- Move prunedir function to utils.py and add explode_dep_versions function
+	- Raise an exception if SRCREV == 'INVALID'
+	- Fix hg fetcher username/password handling and fix crash
+	- Fix PACKAGES_DYNAMIC handling of packages with '++' in the name
+	- Rename __depends to __base_depends after configuration parsing so we don't
+	  recheck the validity of the config files time after time
+	- Add better environmental variable handling. By default it will now only pass certain 
+	  whitelisted variables into the data store. If BB_PRESERVE_ENV is set bitbake will use
+	  all variables from the environment. If BB_ENV_WHITELIST is set, that whitelist will be
+	  used instead of the internal bitbake one. Alternatively, BB_ENV_EXTRAWHITE can be used
+	  to extend the internal whitelist.
+	- Perforce fetcher fix to use commandline options instead of being overridden by the environment
+	- bb.utils.prunedir can cope with symlinks to directories without exceptions
+	- Use @rev when doing an svn checkout
+	- Add osc fetcher (from Joshua Lock in Poky)
+	- When SRCREV autorevisioning for a recipe is in use, don't cache the recipe
+	- Add tryaltconfigs option to control whether bitbake tries using alternative providers
+	  to fulfil failed dependencies. It defaults to off, changing the default since this
+	  behaviour confuses many users and isn't often useful.
+	- Improve lock file function error handling
+	- Add username handling to the git fetcher (Robert Bragg)
+	- Add support for HTTP_PROXY and HTTP_PROXY_IGNORE variables to the wget fetcher
+	- Export more variables to the fetcher commands to allow ssh checkouts and checkouts through 
+	  proxies to work better. (from Poky)
+	- Also allow user and pswd options in SRC_URIs globally (from Poky)
+	- Improve proxy handling when using mirrors (from Poky)
+	- Add bb.utils.prune_suffix function
+	- Fix hg checkouts of specific revisions (from Poky)
+	- Fix wget fetching of urls with parameters specified (from Poky)
+	- Add username handling to git fetcher (from Poky)
+	- Set HOME environmental variable when running fetcher commands (from Poky)
+	- Make sure allowed variables inherited from the environment are exported again (from Poky)
+	- When running a stage task in bbshell, run populate_staging, not the stage task (from Poky)
+	- Fix + character escaping from PACKAGES_DYNAMIC (thanks Otavio Salvador)
+	- Addition of BBCLASSEXTEND support for allowing one recipe to provide multiple targets (from Poky)
+
+Changes in Bitbake 1.8.0:
+	- Release 1.7.x as a stable series
+
+Changes in BitBake 1.7.x:
+	- Major updates of the dependency handling and execution
+	  of tasks. Code from bin/bitbake replaced with runqueue.py
+	  and taskdata.py
+	- New task execution code supports multithreading with a simplistic
+	  threading algorithm controlled by BB_NUMBER_THREADS
+	- Change of the SVN Fetcher to keep the checkout around
+	  courtesy of Paul Sokolovsky (#1367)
+	- PATH fix to bbimage (#1108)
+	- Allow debug domains to be specified on the commandline (-l)
+	- Allow 'interactive' tasks
+	- Logging message improvements
+	- Drop now unneeded BUILD_ALL_DEPS variable
+	- Add support for wildcards to -b option
+	- Major overhaul of the fetchers making a large amount of code common
+	  including mirroring code
+	- Fetchers now touch md5 stamps upon access (to show activity)
+	- Fix -f force option when used without -b (long standing bug)
+	- Add expand_cache to data_cache.py, caching expanded data (speedup)
+	- Allow version field in DEPENDS (ignored for now)
+	- Add abort flag support to the shell
+	- Make inherit fail if the class doesn't exist (#1478)
+	- Fix data.emit_env() to expand keynames as well as values
+	- Add ssh fetcher
+	- Add perforce fetcher
+	- Make PREFERRED_PROVIDER_foobar default to foobar if available
+	- Share the parser's mtime_cache, reducing the number of stat syscalls
+	- Compile all anonfuncs at once! 
+	  *** Anonfuncs must now use common spacing format ***
+	- Memorise the list of handlers in __BBHANDLERS and tasks in __BBTASKS
+	  This removes 2 million function calls resulting in a 5-10% speedup
+	- Add manpage
+	- Update generateDotGraph to use taskData/runQueue improving accuracy
+	  and also adding a task dependency graph
+	- Fix/standardise on GPLv2 licence
+	- Move most functionality from bin/bitbake to cooker.py and split into
+	  separate functions
+	- CVS fetcher: Added support for non-default port
+	- Add BBINCLUDELOGS_LINES, the number of lines to read from any logfile
+	- Drop shebangs from lib/bb scripts
+
+Changes in Bitbake 1.6.0:
+	- Better msg handling
+	- COW dict implementation from Tim Ansell (mithro) leading
+	  to better performance
+	- Speed up of -s
+
+Changes in Bitbake 1.4.4:
+	- SRCDATE handling now courtesy of Justin Patrin
+	- #1017 fix to work with rm_work
+
+Changes in BitBake 1.4.2:
+	- Send logs to oe.pastebin.com instead of pastebin.com
+	  fixes #856
+	- Copy the internal bitbake data before building the
+	  dependency graph. This fixes nano not having a
+	  virtual/libc dependency
+	- Allow multiple TARBALL_STASH entries
+	- Cache, check if the directory exists before changing
+	  into it
+	- git speedup cloning by not doing a checkout
+	- allow to have spaces in filenames (.conf, .bb, .bbclass)
+
+Changes in BitBake 1.4.0:
+	- Fix to check both RDEPENDS and RDEPENDS_${PN}
+	- Fix a RDEPENDS parsing bug in utils:explode_deps()
+	- Update git fetcher behaviour to match git changes
+	- ASSUME_PROVIDED allowed to include runtime packages
+	- git fetcher cleanup and efficiency improvements
+	- Change the format of the cache
+	- Update usermanual to document the Fetchers
+	- Major changes to caching with a new strategy
+	  giving a major performance increase when reparsing
+	  with few data changes
+
+Changes in BitBake 1.3.3:
+	- Create a new Fetcher module to ease the
+	  development of new Fetchers.
+	  Issue #438 fixed by rpurdie@openedhand.com
+	- Make the Subversion fetcher honor the SRC Date
+	  (CVSDATE).
+	  Issue #555 fixed by chris@openedhand.com
+	- Expand PREFERRED_PROVIDER properly
+	  Issue #436 fixed by rpurdie@openedhand.com
+	- Typo fix for Issue #531 by Philipp Zabel for the
+	  BitBake Shell
+	- Introduce a new special variable SRCDATE as
+	  a generic naming to replace CVSDATE.
+	- Introduce a new keyword 'required'. In contrast
+	  to 'include', parsing will fail if a file to be
+	  included cannot be found.
+	- Remove hardcoding of the STAMP directory. Patch
+	  courtesy of Philipp Zabel
+	- Track the RDEPENDS of each package (rpurdie@openedhand.com)
+	- Introduce BUILD_ALL_DEPS to build all RDEPENDS. E.g.
+	  this is used by the OpenEmbedded Meta Packages.
+	  (rpurdie@openedhand.com).
+
+Changes in BitBake 1.3.2:
+	- reintegration of make.py into BitBake
+	- bbread is gone, use bitbake -e
+	- lots of shell updates and bugfixes
+	- Introduction of the .= and =. operator
+	- Sort variables, keys and groups in bitdoc
+	- Fix regression in the handling of BBCOLLECTIONS
+	- Update the bitbake usermanual
+
+Changes in BitBake 1.3.0:
+	- add bitbake interactive shell (bitbake -i)
+	- refactor bitbake utility in OO style
+	- kill default arguments in methods in the bb.data module
+	- kill default arguments in methods in the bb.fetch module
+	- the http/https/ftp fetcher will fail if the file to be
+	  downloaded was not found in DL_DIR (this is needed
+	  to avoid unpacking the sourceforge mirror page)
+	- Switch to a cow like data instance for persistent and non
+	  persisting mode (called data_smart.py)
+	- Changed the callback of bb.make.collect_bbfiles to carry
+	  additional parameters
+	- Drastically reduced the amount of needed RAM by not holding
+	  each data instance in memory when using a cache/persistent
+	  storage
+
+Changes in BitBake 1.2.1:
+	The 1.2.1 release is meant as an intermediate release to lay the
+	ground for more radical changes. The most notable changes are:
+
+	- Do not hardcode {}, use bb.data.init() instead if you want to
+	  get an instance of a data class
+	- bb.data.init() is a factory and the old bb.data methods are delegates
+	- Do not use deepcopy; use bb.data.createCopy() instead.
+	- Removed default arguments in bb.fetch
+

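The 1.2.1 notes above point at the datastore API that the rest of this tree builds on: bb.data.init() as the factory and bb.data.createCopy() instead of deepcopy. As an aside, here is a minimal sketch of the copy-on-write behaviour those entries describe, assuming bitbake/lib is on sys.path and a Python 2 interpreter as used by the scripts below; the variable values are purely illustrative:

    import bb.data

    parent = bb.data.init()                # the factory from the 1.2.1 notes
    parent.setVar("PN", "example")

    child = bb.data.createCopy(parent)     # cheap COW copy, not a deepcopy
    child.setVar("PN", "example-native")   # only the copy sees this change

    print(parent.getVar("PN", True))       # -> example (parent unchanged)
    print(child.getVar("PN", True))        # -> example-native
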
+ 19 - 0
bitbake/HEADER

@@ -0,0 +1,19 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# <one line to give the program's name and a brief idea of what it does.>
+# Copyright (C) <year>  <name of author>
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+

+ 10 - 0
bitbake/LICENSE

@@ -0,0 +1,10 @@
+BitBake is licensed under the GNU General Public License version 2.0. See COPYING for further details.
+
+The following external components are distributed with this software:
+
+* The Toaster Simple UI application is based upon the Django project template, the files of which are covered by the BSD license and are copyright (c) Django Software
+Foundation and individual contributors.
+
+* Twitter Bootstrap (including Glyphicons), redistributed under the Apache License 2.0.
+
+* jQuery is redistributed under the MIT license.

+ 11 - 0
bitbake/MANIFEST.in

@@ -0,0 +1,11 @@
+include COPYING
+include ChangeLog
+include AUTHORS
+include contrib/*
+include contrib/vim/*/*
+include conf/*
+include classes/*
+include doc/*
+include doc/manual/*
+include ez_setup.py
+include HEADER

+ 62 - 0
bitbake/TODO

@@ -0,0 +1,62 @@
+- Reimplement the interactive mode as a proper ui
+- Continue dropping fatal/SystemExit/sys.exit usage in favor of raising
+  appropriate exceptions
+- Continue pylint / pyflakes / pychecker / pep8 fixups
+- Drop os.system usage in favor of direct subprocess usage or a subprocess
+  wrapper
+- Kill the execution of 'tee' for the task log file in build.py
+- Fix up the exception handling
+
+  - Kill exec_task's catch of FuncFailed, instead catch it in the other
+    callers of exec_task/exec_func
+  - What exactly is the purpose of the "EventException"?  I can see using an
+    exception like that, *perhaps*, to abstract away exceptions raised by
+    event handlers, but it has no place in bb.build.exec_task
+
+- BUG: if you chmod 000 local.conf, bitbake silently skips parsing it, when it
+  should really fail so the user can fix the problem.
+
+- Audit bb.fatal usage - these should all be able to be replaced with
+  exceptions
+- Figure out how to handle the ncurses UI.  Should some of our logging
+  formatting stuff be made common to all of bitbake, or perhaps all UIs via
+  the UIHelper?
+
+Long term, high impact:
+
+  - Change override application to actually *move* it over -- so the original
+    override specific version of the variable goes away, rather than sticking
+    around as a duplicate.
+  - Change the behavior when a variable is referenced and is unset.  Today, it
+    evaluates to ${FOO} and then shell has a chance to expand it, but this is
+    far from ideal.  We had considered evaluating it to the empty string, but
+    that has other potential problems.  Frans Meulenbroeks has proposed just
+    erroring when this occurs, as we can always define default values for the
+    variables in bitbake.conf.  This seems reasonable.  My only concern with
+    that is the case where you want to reference a shell variable with odd
+    characters in it -- where you'd have to use ${} style shell variable
+    expansion rather than normal $.  To handle that case, we'd really need a
+    way to escape / disable bitbake variable expansion, \${} perhaps.
+
+Uncertain:
+
+  - Leverage the python 2.6 multiprocessing module
+
+    - Worker processes for bb.cooker
+    - Server / UI processes
+
+  - Create a bitbake configuration class which is utilized by the library, not
+    just bin/bitbake.  This class should be responsible for extracting
+    configuration parameters from the metadata for bitbake internal use, as well
+    as pulling specific items like BBDEBUG, and importing settings from an
+    optparse options object.
+
+  - Python version bits
+
+    - Utilize the new string formatting where appropriate
+    - Do we need to take into account the bytes literals changes?
+    - Do we have any file-like objects that would benefit from using the "io"
+      module?
+    - Do we want to leverage the abstract base classes in collections?
+    - Aside: Set methods now accept multiple iterables
+

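The TODO item above about referencing an unset variable can be reproduced directly against the datastore: expanding a string leaves unknown ${...} references literal rather than erroring or returning an empty string. A minimal sketch, assuming bitbake/lib is on sys.path (bin/bitbake below uses the same sys.path trick) and using bb.data.init() from the ChangeLog plus the datastore's setVar()/expand() methods (an assumption about the lib/bb API in this tree):

    import bb.data

    d = bb.data.init()
    d.setVar("BAR", "hello")
    print(d.expand("${BAR}"))     # -> hello
    print(d.expand("${UNSET}"))   # -> ${UNSET}, left literal for the shell to expand (or not)
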
+ 53 - 0
bitbake/bin/bitbake

@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Copyright (C) 2003, 2004  Chris Larson
+# Copyright (C) 2003, 2004  Phil Blundell
+# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
+# Copyright (C) 2005        Holger Hans Peter Freyther
+# Copyright (C) 2005        ROAD GmbH
+# Copyright (C) 2006        Richard Purdie
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import sys
+
+sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)),
+                                'lib'))
+try:
+    import bb
+except RuntimeError as exc:
+    sys.exit(str(exc))
+
+from bb import cookerdata
+from bb.main import bitbake_main, BitBakeConfigParameters, BBMainException
+
+__version__ = "1.29.0"
+
+if __name__ == "__main__":
+    if __version__ != bb.__version__:
+        sys.exit("Bitbake core version and program version mismatch!")
+    try:
+        sys.exit(bitbake_main(BitBakeConfigParameters(sys.argv),
+                              cookerdata.CookerConfiguration()))
+    except BBMainException as err:
+        sys.exit(err)
+    except bb.BBHandledException:
+        sys.exit(1)
+    except Exception:
+        import traceback
+        traceback.print_exc()
+        sys.exit(1)

+ 138 - 0
bitbake/bin/bitbake-diffsigs

@@ -0,0 +1,138 @@
+#!/usr/bin/env python
+
+# bitbake-diffsigs
+# BitBake task signature data comparison utility
+#
+# Copyright (C) 2012-2013 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import sys
+import warnings
+import fnmatch
+import optparse
+import logging
+
+sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
+
+import bb.tinfoil
+import bb.siggen
+
+def logger_create(name, output=sys.stderr):
+    logger = logging.getLogger(name)
+    console = logging.StreamHandler(output)
+    format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
+    if output.isatty():
+        format.enable_color()
+    console.setFormatter(format)
+    logger.addHandler(console)
+    logger.setLevel(logging.INFO)
+    return logger
+
+logger = logger_create('bitbake-diffsigs')
+
+def find_compare_task(bbhandler, pn, taskname):
+    """ Find the most recent signature files for the specified PN/task and compare them """
+
+    def get_hashval(siginfo):
+        if siginfo.endswith('.siginfo'):
+            return siginfo.rpartition(':')[2].partition('_')[0]
+        else:
+            return siginfo.rpartition('.')[2]
+
+    if not hasattr(bb.siggen, 'find_siginfo'):
+        logger.error('Metadata does not support finding signature data files')
+        sys.exit(1)
+
+    if not taskname.startswith('do_'):
+        taskname = 'do_%s' % taskname
+
+    filedates = bb.siggen.find_siginfo(pn, taskname, None, bbhandler.config_data)
+    latestfiles = sorted(filedates.keys(), key=lambda f: filedates[f])[-3:]
+    if not latestfiles:
+        logger.error('No sigdata files found matching %s %s' % (pn, taskname))
+        sys.exit(1)
+    elif len(latestfiles) < 2:
+        logger.error('Only one matching sigdata file found for the specified task (%s %s)' % (pn, taskname))
+        sys.exit(1)
+    else:
+        # It's possible that latestfiles contains 3 elements and the first two have the same hash value.
+        # In this case, we delete the second element. This is actually the most common case, because a
+        # sigdata file and a siginfo file may share the same hash value, and comparing two such files
+        # makes no sense.
+        if len(latestfiles) == 3:
+            hash0 = get_hashval(latestfiles[0])
+            hash1 = get_hashval(latestfiles[1])
+            if hash0 == hash1:
+                latestfiles.pop(1)
+
+        # Define recursion callback
+        def recursecb(key, hash1, hash2):
+            hashes = [hash1, hash2]
+            hashfiles = bb.siggen.find_siginfo(key, None, hashes, bbhandler.config_data)
+
+            recout = []
+            if len(hashfiles) == 2:
+                out2 = bb.siggen.compare_sigfiles(hashfiles[hash1], hashfiles[hash2], recursecb)
+                recout.extend(list('  ' + l for l in out2))
+            else:
+                recout.append("Unable to find matching sigdata for %s with hashes %s or %s" % (key, hash1, hash2))
+
+            return recout
+
+        # Recurse into signature comparison
+        output = bb.siggen.compare_sigfiles(latestfiles[0], latestfiles[1], recursecb)
+        if output:
+            print '\n'.join(output)
+    sys.exit(0)
+
+
+
+parser = optparse.OptionParser(
+    description = "Compares siginfo/sigdata files written out by BitBake",
+    usage = """
+  %prog -t recipename taskname
+  %prog sigdatafile1 sigdatafile2
+  %prog sigdatafile1""")
+
+parser.add_option("-t", "--task",
+        help = "find the signature data files for last two runs of the specified task and compare them",
+        action="store", dest="taskargs", nargs=2, metavar='recipename taskname')
+
+options, args = parser.parse_args(sys.argv)
+
+if options.taskargs:
+    tinfoil = bb.tinfoil.Tinfoil()
+    tinfoil.prepare(config_only = True)
+    find_compare_task(tinfoil, options.taskargs[0], options.taskargs[1])
+else:
+    if len(args) == 1:
+        parser.print_help()
+    else:
+        import cPickle
+        try:
+            if len(args) == 2:
+                output = bb.siggen.dump_sigfile(sys.argv[1])
+            else:
+                output = bb.siggen.compare_sigfiles(sys.argv[1], sys.argv[2])
+        except IOError as e:
+            logger.error(str(e))
+            sys.exit(1)
+        except (cPickle.UnpicklingError, EOFError):
+            logger.error('Invalid signature data - ensure you are specifying sigdata/siginfo files')
+            sys.exit(1)
+
+        if output:
+            print '\n'.join(output)

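For reference, the two-file code path above reduces to a couple of bb.siggen calls that can also be driven from a standalone script. A minimal sketch, assuming it is run from the directory containing bitbake/ and that the sigdata file names are placeholders:

    import os
    import sys

    # Make lib/bb importable, the same way the scripts in bin/ do.
    sys.path.insert(0, os.path.join('bitbake', 'lib'))
    import bb.siggen

    # Dump one signature file, or compare two, as bitbake-diffsigs does
    # when given file arguments instead of -t.
    print('\n'.join(bb.siggen.dump_sigfile('task1.sigdata')))
    print('\n'.join(bb.siggen.compare_sigfiles('task1.sigdata', 'task2.sigdata')))
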
+ 65 - 0
bitbake/bin/bitbake-dumpsig

@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+
+# bitbake-dumpsig
+# BitBake task signature dump utility
+#
+# Copyright (C) 2013 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import sys
+import warnings
+import optparse
+import logging
+
+sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
+
+import bb.siggen
+
+def logger_create(name, output=sys.stderr):
+    logger = logging.getLogger(name)
+    console = logging.StreamHandler(output)
+    format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
+    if output.isatty():
+        format.enable_color()
+    console.setFormatter(format)
+    logger.addHandler(console)
+    logger.setLevel(logging.INFO)
+    return logger
+
+logger = logger_create('bitbake-dumpsig')
+
+parser = optparse.OptionParser(
+    description = "Dumps siginfo/sigdata files written out by BitBake",
+    usage = """
+  %prog sigdatafile""")
+
+options, args = parser.parse_args(sys.argv)
+
+if len(args) == 1:
+    parser.print_help()
+else:
+    import cPickle
+    try:
+        output = bb.siggen.dump_sigfile(args[1])
+    except IOError as e:
+        logger.error(str(e))
+        sys.exit(1)
+    except (cPickle.UnpicklingError, EOFError):
+        logger.error('Invalid signature data - ensure you are specifying a sigdata/siginfo file')
+        sys.exit(1)
+
+    if output:
+        print '\n'.join(output)

+ 1072 - 0
bitbake/bin/bitbake-layers

@@ -0,0 +1,1072 @@
+#!/usr/bin/env python
+
+# This script has subcommands which operate against your bitbake layers, either
+# displaying useful information or acting on them.
+# See the help output for details on available commands.
+
+# Copyright (C) 2011 Mentor Graphics Corporation
+# Copyright (C) 2011-2015 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import logging
+import os
+import sys
+import fnmatch
+from collections import defaultdict
+import argparse
+import re
+import httplib, urlparse, json
+import subprocess
+
+bindir = os.path.dirname(__file__)
+topdir = os.path.dirname(bindir)
+sys.path[0:0] = [os.path.join(topdir, 'lib')]
+
+import bb.cache
+import bb.cooker
+import bb.providers
+import bb.utils
+import bb.tinfoil
+
+
+def logger_create(name, output=sys.stderr):
+    logger = logging.getLogger(name)
+    console = logging.StreamHandler(output)
+    format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
+    if output.isatty():
+        format.enable_color()
+    console.setFormatter(format)
+    logger.addHandler(console)
+    logger.setLevel(logging.INFO)
+    return logger
+
+logger = logger_create('bitbake-layers', sys.stdout)
+
+class UserError(Exception):
+    pass
+
+class Commands():
+    def __init__(self):
+        self.bbhandler = None
+        self.bblayers = []
+
+    def init_bbhandler(self, config_only = False):
+        if not self.bbhandler:
+            self.bbhandler = bb.tinfoil.Tinfoil(tracking=True)
+            self.bblayers = (self.bbhandler.config_data.getVar('BBLAYERS', True) or "").split()
+            self.bbhandler.prepare(config_only)
+            layerconfs = self.bbhandler.config_data.varhistory.get_variable_items_files('BBFILE_COLLECTIONS', self.bbhandler.config_data)
+            self.bbfile_collections = {layer: os.path.dirname(os.path.dirname(path)) for layer, path in layerconfs.iteritems()}
+
+
+    def do_show_layers(self, args):
+        """show current configured layers"""
+        self.init_bbhandler(config_only = True)
+        logger.plain("%s  %s  %s" % ("layer".ljust(20), "path".ljust(40), "priority"))
+        logger.plain('=' * 74)
+        for layer, _, regex, pri in self.bbhandler.cooker.recipecache.bbfile_config_priorities:
+            layerdir = self.bbfile_collections.get(layer, None)
+            layername = self.get_layer_name(layerdir)
+            logger.plain("%s  %s  %d" % (layername.ljust(20), layerdir.ljust(40), pri))
+
+
+    def do_add_layer(self, args):
+        """Add a layer to bblayers.conf
+
+Adds the specified layer to bblayers.conf
+"""
+        layerdir = os.path.abspath(args.layerdir)
+        if not os.path.exists(layerdir):
+            sys.stderr.write("Specified layer directory doesn't exist\n")
+            return 1
+
+        layer_conf = os.path.join(layerdir, 'conf', 'layer.conf')
+        if not os.path.exists(layer_conf):
+            sys.stderr.write("Specified layer directory doesn't contain a conf/layer.conf file\n")
+            return 1
+
+        bblayers_conf = os.path.join('conf', 'bblayers.conf')
+        if not os.path.exists(bblayers_conf):
+            sys.stderr.write("Unable to find bblayers.conf\n")
+            return 1
+
+        (notadded, _) = bb.utils.edit_bblayers_conf(bblayers_conf, layerdir, None)
+        if notadded:
+            for item in notadded:
+                sys.stderr.write("Specified layer %s is already in BBLAYERS\n" % item)
+
+
+    def do_remove_layer(self, args):
+        """Remove a layer from bblayers.conf
+
+Removes the specified layer from bblayers.conf
+"""
+        bblayers_conf = os.path.join('conf', 'bblayers.conf')
+        if not os.path.exists(bblayers_conf):
+            sys.stderr.write("Unable to find bblayers.conf\n")
+            return 1
+
+        if args.layerdir.startswith('*'):
+            layerdir = args.layerdir
+        elif not '/' in args.layerdir:
+            layerdir = '*/%s' % args.layerdir
+        else:
+            layerdir = os.path.abspath(args.layerdir)
+        (_, notremoved) = bb.utils.edit_bblayers_conf(bblayers_conf, None, layerdir)
+        if notremoved:
+            for item in notremoved:
+                sys.stderr.write("No layers matching %s found in BBLAYERS\n" % item)
+            return 1
+
+
+    def get_json_data(self, apiurl):
+        proxy_settings = os.environ.get("http_proxy", None)
+        conn = None
+        _parsedurl = urlparse.urlparse(apiurl)
+        path = _parsedurl.path
+        query = _parsedurl.query
+        def parse_url(url):
+            parsedurl = urlparse.urlparse(url)
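+            # An IPv6 netloc arrives as "[addr]:port"; strip the brackets
+            # before splitting off any port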
+            if parsedurl.netloc[0] == '[':
+                host, port = parsedurl.netloc[1:].split(']', 1)
+                if ':' in port:
+                    port = port.rsplit(':', 1)[1]
+                else:
+                    port = None
+            else:
+                if parsedurl.netloc.count(':') == 1:
+                    (host, port) = parsedurl.netloc.split(":")
+                else:
+                    host = parsedurl.netloc
+                    port = None
+            return (host, 80 if port is None else int(port))
+
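+        # If http_proxy is set, connect to the proxy and request the full URL;
+        # otherwise connect straight to the API host and request only the path and query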
+        if proxy_settings is None:
+            host, port = parse_url(apiurl)
+            conn = httplib.HTTPConnection(host, port)
+            conn.request("GET", path + "?" + query)
+        else:
+            host, port = parse_url(proxy_settings)
+            conn = httplib.HTTPConnection(host, port)
+            conn.request("GET", apiurl)
+
+        r = conn.getresponse()
+        if r.status != 200:
+            raise Exception("Failed to read " + path + ": %d %s" % (r.status, r.reason))
+        return json.loads(r.read())
+
+
+    def get_layer_deps(self, layername, layeritems, layerbranches, layerdependencies, branchnum, selfname=False):
+        def layeritems_info_id(items_name, layeritems):
+            litems_id = None
+            for li in layeritems:
+                if li['name'] == items_name:
+                    litems_id = li['id']
+                    break
+            return litems_id
+
+        def layerbranches_info(items_id, layerbranches):
+            lbranch = {}
+            for lb in layerbranches:
+                if lb['layer'] == items_id and lb['branch'] == branchnum:
+                    lbranch['id'] = lb['id']
+                    lbranch['vcs_subdir'] = lb['vcs_subdir']
+                    break
+            return lbranch
+
+        def layerdependencies_info(lb_id, layerdependencies):
+            ld_deps = []
+            for ld in layerdependencies:
+                if ld['layerbranch'] == lb_id and not ld['dependency'] in ld_deps:
+                    ld_deps.append(ld['dependency'])
+            if not ld_deps:
+                logger.error("The dependency of layerDependencies is not found.")
+            return ld_deps
+
+        def layeritems_info_name_subdir(items_id, layeritems):
+            litems = {}
+            for li in layeritems:
+                if li['id'] == items_id:
+                    litems['vcs_url'] = li['vcs_url']
+                    litems['name'] = li['name']
+                    break
+            return litems
+
+        if selfname:
+            selfid = layeritems_info_id(layername, layeritems)
+            lbinfo = layerbranches_info(selfid, layerbranches)
+            if lbinfo:
+                selfsubdir = lbinfo['vcs_subdir']
+            else:
+                logger.error("%s is not found in the specified branch" % layername)
+                return
+            selfurl = layeritems_info_name_subdir(selfid, layeritems)['vcs_url']
+            if selfurl:
+                return selfurl, selfsubdir
+            else:
+                logger.error("Cannot get layer %s git repo and subdir" % layername)
+                return
+        ldict = {}
+        itemsid = layeritems_info_id(layername, layeritems)
+        if not itemsid:
+            return layername, None
+        lbid = layerbranches_info(itemsid, layerbranches)
+        if lbid:
+            lbid = layerbranches_info(itemsid, layerbranches)['id']
+        else:
+            logger.error("%s is not found in the specified branch" % layername)
+            return None, None
+        for dependency in layerdependencies_info(lbid, layerdependencies):
+            lname = layeritems_info_name_subdir(dependency, layeritems)['name']
+            lurl = layeritems_info_name_subdir(dependency, layeritems)['vcs_url']
+            lsubdir = layerbranches_info(dependency, layerbranches)['vcs_subdir']
+            ldict[lname] = lurl, lsubdir
+        return None, ldict
+
+
+    def get_fetch_layer(self, fetchdir, url, subdir, fetch_layer):
+        layername = self.get_layer_name(url)
+        if os.path.splitext(layername)[1] == '.git':
+            layername = os.path.splitext(layername)[0]
+        repodir = os.path.join(fetchdir, layername)
+        layerdir = os.path.join(repodir, subdir)
+        if not os.path.exists(repodir):
+            if fetch_layer:
+                result = subprocess.call('git clone %s %s' % (url, repodir), shell = True)
+                if result:
+                    logger.error("Failed to download %s" % url)
+                    return None, None
+                else:
+                    return layername, layerdir
+            else:
+                logger.plain("Repository %s needs to be fetched" % url)
+                return layername, layerdir
+        elif os.path.exists(layerdir):
+            return layername, layerdir
+        else:
+            logger.error("%s is not in %s" % (url, subdir))
+        return None, None
+
+
+    def do_layerindex_fetch(self, args):
+        """Fetches a layer from a layer index along with its dependent layers, and adds them to conf/bblayers.conf.
+"""
+        self.init_bbhandler(config_only = True)
+        apiurl = self.bbhandler.config_data.getVar('BBLAYERS_LAYERINDEX_URL', True)
+        if not apiurl:
+            logger.error("Cannot get BBLAYERS_LAYERINDEX_URL")
+            return 1
+        else:
+            if apiurl[-1] != '/':
+                apiurl += '/'
+            apiurl += "api/"
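+        # The index's api/ endpoint returns a dictionary of further endpoint URLs
+        # (branches, layerItems, layerBranches, layerDependencies) fetched below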
+        apilinks = self.get_json_data(apiurl)
+        branches = self.get_json_data(apilinks['branches'])
+
+        branchnum = 0
+        for branch in branches:
+            if branch['name'] == args.branch:
+                branchnum = branch['id']
+                break
+        if branchnum == 0:
+            validbranches = ', '.join([branch['name'] for branch in branches])
+            logger.error('Invalid layer branch name "%s". Valid branches: %s' % (args.branch, validbranches))
+            return 1
+
+        ignore_layers = []
+        for collection in self.bbhandler.config_data.getVar('BBFILE_COLLECTIONS', True).split():
+            lname = self.bbhandler.config_data.getVar('BBLAYERS_LAYERINDEX_NAME_%s' % collection, True)
+            if lname:
+                ignore_layers.append(lname)
+
+        if args.ignore:
+            ignore_layers.extend(args.ignore.split(','))
+
+        layeritems = self.get_json_data(apilinks['layerItems'])
+        layerbranches = self.get_json_data(apilinks['layerBranches'])
+        layerdependencies = self.get_json_data(apilinks['layerDependencies'])
+        invaluenames = []
+        repourls = {}
+        printlayers = []
+        def query_dependencies(layers, layeritems, layerbranches, layerdependencies, branchnum):
+            depslayer = []
+            for layername in layers:
+                invaluename, layerdict = self.get_layer_deps(layername, layeritems, layerbranches, layerdependencies, branchnum)
+                if layerdict:
+                    repourls[layername] = self.get_layer_deps(layername, layeritems, layerbranches, layerdependencies, branchnum, selfname=True)
+                    for layer in layerdict:
+                        if not layer in ignore_layers:
+                            depslayer.append(layer)
+                        printlayers.append((layername, layer, layerdict[layer][0], layerdict[layer][1]))
+                        if not layer in ignore_layers and not layer in repourls:
+                            repourls[layer] = (layerdict[layer][0], layerdict[layer][1])
+                if invaluename and not invaluename in invaluenames:
+                    invaluenames.append(invaluename)
+            return depslayer
+
+        depslayers = query_dependencies(args.layername, layeritems, layerbranches, layerdependencies, branchnum)
+        while depslayers:
+            depslayer = query_dependencies(depslayers, layeritems, layerbranches, layerdependencies, branchnum)
+            depslayers = depslayer
+        if invaluenames:
+            for invaluename in invaluenames:
+                logger.error('Layer "%s" not found in layer index' % invaluename)
+            return 1
+        logger.plain("%s  %s  %s  %s" % ("Layer".ljust(19), "Required by".ljust(19), "Git repository".ljust(54), "Subdirectory"))
+        logger.plain('=' * 115)
+        for layername in args.layername:
+            layerurl = repourls[layername]
+            logger.plain("%s %s %s %s" % (layername.ljust(20), '-'.ljust(20), layerurl[0].ljust(55), layerurl[1]))
+        printedlayers = []
+        for layer, dependency, gitrepo, subdirectory in printlayers:
+            if dependency in printedlayers:
+                continue
+            logger.plain("%s %s %s %s" % (dependency.ljust(20), layer.ljust(20), gitrepo.ljust(55), subdirectory))
+            printedlayers.append(dependency)
+
+        if repourls:
+            fetchdir = self.bbhandler.config_data.getVar('BBLAYERS_FETCH_DIR', True)
+            if not fetchdir:
+                logger.error("Cannot get BBLAYERS_FETCH_DIR")
+                return 1
+            if not os.path.exists(fetchdir):
+                os.makedirs(fetchdir)
+            addlayers = []
+            for repourl, subdir in repourls.values():
+                name, layerdir = self.get_fetch_layer(fetchdir, repourl, subdir, not args.show_only)
+                if not name:
+                    # Error already shown
+                    return 1
+                addlayers.append((subdir, name, layerdir))
+        if not args.show_only:
+            for subdir, name, layerdir in set(addlayers):
+                if os.path.exists(layerdir):
+                    if subdir:
+                        logger.plain("Adding layer \"%s\" to conf/bblayers.conf" % subdir)
+                    else:
+                        logger.plain("Adding layer \"%s\" to conf/bblayers.conf" % name)
+                    localargs = argparse.Namespace()
+                    localargs.layerdir = layerdir
+                    self.do_add_layer(localargs)
+                else:
+                    break
+
+
+    def do_layerindex_show_depends(self, args):
+        """Find layer dependencies from layer index.
+"""
+        args.show_only = True
+        args.ignore = []
+        self.do_layerindex_fetch(args)
+
+
+    def version_str(self, pe, pv, pr = None):
+        verstr = "%s" % pv
+        if pr:
+            verstr = "%s-%s" % (verstr, pr)
+        if pe:
+            verstr = "%s:%s" % (pe, verstr)
+        return verstr
+
+
+    def do_show_overlayed(self, args):
+        """list overlayed recipes (where the same recipe exists in another layer)
+
+Lists the names of overlayed recipes and the available versions in each
+layer, with the preferred version first. Note that skipped recipes that
+are overlayed will also be listed, with a " (skipped)" suffix.
+"""
+        self.init_bbhandler()
+
+        items_listed = self.list_recipes('Overlayed recipes', None, True, args.same_version, args.filenames, True, None)
+
+        # Check for overlayed .bbclass files
+        classes = defaultdict(list)
+        for layerdir in self.bblayers:
+            classdir = os.path.join(layerdir, 'classes')
+            if os.path.exists(classdir):
+                for classfile in os.listdir(classdir):
+                    if os.path.splitext(classfile)[1] == '.bbclass':
+                        classes[classfile].append(classdir)
+
+        # Locating classes and other files is a bit more complicated than recipes -
+        # layer priority is not a factor; instead BitBake uses the first matching
+        # file in BBPATH, which is manipulated directly by each layer's
+        # conf/layer.conf in turn, thus the order of layers in bblayers.conf is a
+        # factor - however, each layer.conf is free to either prepend or append to
+        # BBPATH (or indeed do crazy stuff with it). Thus the order in BBPATH might
+        # not be exactly the order present in bblayers.conf either.
+        bbpath = str(self.bbhandler.config_data.getVar('BBPATH', True))
+        overlayed_class_found = False
+        for (classfile, classdirs) in classes.items():
+            if len(classdirs) > 1:
+                if not overlayed_class_found:
+                    logger.plain('=== Overlayed classes ===')
+                    overlayed_class_found = True
+
+                mainfile = bb.utils.which(bbpath, os.path.join('classes', classfile))
+                if args.filenames:
+                    logger.plain('%s' % mainfile)
+                else:
+                    # We effectively have to guess the layer here
+                    logger.plain('%s:' % classfile)
+                    mainlayername = '?'
+                    for layerdir in self.bblayers:
+                        classdir = os.path.join(layerdir, 'classes')
+                        if mainfile.startswith(classdir):
+                            mainlayername = self.get_layer_name(layerdir)
+                    logger.plain('  %s' % mainlayername)
+                for classdir in classdirs:
+                    fullpath = os.path.join(classdir, classfile)
+                    if fullpath != mainfile:
+                        if args.filenames:
+                            print('  %s' % fullpath)
+                        else:
+                            print('  %s' % self.get_layer_name(os.path.dirname(classdir)))
+
+        if overlayed_class_found:
+            items_listed = True
+
+        if not items_listed:
+            logger.plain('No overlayed files found.')
+
+
+    def do_show_recipes(self, args):
+        """list available recipes, showing the layer they are provided by
+
+Lists the names of recipes and the available versions in each
+layer, with the preferred version first. Optionally you may specify
+pnspec to match a specified recipe name (supports wildcards). Note that
+skipped recipes will also be listed, with a " (skipped)" suffix.
+"""
+        self.init_bbhandler()
+
+        inheritlist = args.inherits.split(',') if args.inherits else []
+        if inheritlist or args.pnspec or args.multiple:
+            title = 'Matching recipes:'
+        else:
+            title = 'Available recipes:'
+        self.list_recipes(title, args.pnspec, False, False, args.filenames, args.multiple, inheritlist)
+
+
+    def list_recipes(self, title, pnspec, show_overlayed_only, show_same_ver_only, show_filenames, show_multi_provider_only, inherits):
+        if inherits:
+            bbpath = str(self.bbhandler.config_data.getVar('BBPATH', True))
+            for classname in inherits:
+                classfile = 'classes/%s.bbclass' % classname
+                if not bb.utils.which(bbpath, classfile, history=False):
+                    raise UserError('No class named %s found in BBPATH' % classfile)
+
+        pkg_pn = self.bbhandler.cooker.recipecache.pkg_pn
+        (latest_versions, preferred_versions) = bb.providers.findProviders(self.bbhandler.config_data, self.bbhandler.cooker.recipecache, pkg_pn)
+        allproviders = bb.providers.allProviders(self.bbhandler.cooker.recipecache)
+
+        # Ensure we list skipped recipes
+        # We are largely guessing about PN, PV and the preferred version here,
+        # but we have no choice since skipped recipes are not fully parsed
+        skiplist = self.bbhandler.cooker.skiplist.keys()
+        skiplist.sort( key=lambda fileitem: self.bbhandler.cooker.collection.calc_bbfile_priority(fileitem) )
+        skiplist.reverse()
+        for fn in skiplist:
+            recipe_parts = os.path.splitext(os.path.basename(fn))[0].split('_')
+            p = recipe_parts[0]
+            if len(recipe_parts) > 1:
+                ver = (None, recipe_parts[1], None)
+            else:
+                ver = (None, 'unknown', None)
+            allproviders[p].append((ver, fn))
+            if not p in pkg_pn:
+                pkg_pn[p] = 'dummy'
+                preferred_versions[p] = (ver, fn)
+
+        def print_item(f, pn, ver, layer, ispref):
+            if f in skiplist:
+                skipped = ' (skipped)'
+            else:
+                skipped = ''
+            if show_filenames:
+                if ispref:
+                    logger.plain("%s%s", f, skipped)
+                else:
+                    logger.plain("  %s%s", f, skipped)
+            else:
+                if ispref:
+                    logger.plain("%s:", pn)
+                logger.plain("  %s %s%s", layer.ljust(20), ver, skipped)
+
+        global_inherit = (self.bbhandler.config_data.getVar('INHERIT', True) or "").split()
+        cls_re = re.compile('classes/')
+
+        preffiles = []
+        items_listed = False
+        for p in sorted(pkg_pn):
+            if pnspec:
+                if not fnmatch.fnmatch(p, pnspec):
+                    continue
+
+            if len(allproviders[p]) > 1 or not show_multi_provider_only:
+                pref = preferred_versions[p]
+                realfn = bb.cache.Cache.virtualfn2realfn(pref[1])
+                preffile = realfn[0]
+
+                # We only display each recipe once, and prefer the non-extended version of the
+                # recipe if present (so e.g. in OpenEmbedded, openssl rather than nativesdk-openssl,
+                # which would otherwise sort first).
+                if realfn[1] and realfn[0] in self.bbhandler.cooker.recipecache.pkg_fn:
+                    continue
+
+                if inherits:
+                    matchcount = 0
+                    recipe_inherits = self.bbhandler.cooker_data.inherits.get(preffile, [])
+                    for cls in recipe_inherits:
+                        if cls_re.match(cls):
+                            continue
+                        classname = os.path.splitext(os.path.basename(cls))[0]
+                        if classname in global_inherit:
+                            continue
+                        elif classname in inherits:
+                            matchcount += 1
+                    if matchcount != len(inherits):
+                        # No match - skip this recipe
+                        continue
+
+                if preffile not in preffiles:
+                    preflayer = self.get_file_layer(preffile)
+                    multilayer = False
+                    same_ver = True
+                    provs = []
+                    for prov in allproviders[p]:
+                        provfile = bb.cache.Cache.virtualfn2realfn(prov[1])[0]
+                        provlayer = self.get_file_layer(provfile)
+                        provs.append((provfile, provlayer, prov[0]))
+                        if provlayer != preflayer:
+                            multilayer = True
+                        if prov[0] != pref[0]:
+                            same_ver = False
+
+                    if (multilayer or not show_overlayed_only) and (same_ver or not show_same_ver_only):
+                        if not items_listed:
+                            logger.plain('=== %s ===' % title)
+                            items_listed = True
+                        print_item(preffile, p, self.version_str(pref[0][0], pref[0][1]), preflayer, True)
+                        for (provfile, provlayer, provver) in provs:
+                            if provfile != preffile:
+                                print_item(provfile, p, self.version_str(provver[0], provver[1]), provlayer, False)
+                        # Ensure we don't show two entries for BBCLASSEXTENDed recipes
+                        preffiles.append(preffile)
+
+        return items_listed
+
+
+    def do_flatten(self, args):
+        """flatten layer configuration into a separate output directory.
+
+Takes the specified layers (or all layers in the current layer
+configuration if none are specified) and builds a "flattened" directory
+containing the contents of all layers, with any overlayed recipes removed
+and bbappends appended to the corresponding recipes. Note that some manual
+cleanup may still be necessary afterwards, in particular:
+
+* where non-recipe files (such as patches) are overwritten (the flatten
+  command will show a warning for these)
+* where anything beyond the normal layer setup has been added to
+  layer.conf (only the lowest priority number layer's layer.conf is used)
+* overridden/appended items from bbappends will need to be tidied up
+* when the flattened layers do not have the same directory structure (the
+  flatten command should show a warning when this will cause a problem)
+
+Warning: if you flatten several layers where another layer is intended to
+be used "inbetween" them (in layer priority order) such that recipes /
+bbappends in the layers interact, and then attempt to use the new output
+layer together with that other layer, you may no longer get the same
+build results (as the layer priority order has effectively changed).
+"""
+        if len(args.layer) == 1:
+            logger.error('If you specify layers to flatten you must specify at least two')
+            return 1
+
+        outputdir = args.outputdir
+        if os.path.exists(outputdir) and os.listdir(outputdir):
+            logger.error('Directory %s exists and is non-empty, please clear it out first' % outputdir)
+            return 1
+
+        self.init_bbhandler()
+        layers = self.bblayers
+        if len(args.layer) > 2:
+            layernames = args.layer
+            found_layernames = []
+            found_layerdirs = []
+            for layerdir in layers:
+                layername = self.get_layer_name(layerdir)
+                if layername in layernames:
+                    found_layerdirs.append(layerdir)
+                    found_layernames.append(layername)
+
+            for layername in layernames:
+                if not layername in found_layernames:
+                    logger.error('Unable to find layer %s in current configuration, please run "%s show-layers" to list configured layers' % (layername, os.path.basename(sys.argv[0])))
+                    return
+            layers = found_layerdirs
+        else:
+            layernames = []
+
+        # Ensure a specified path matches our list of layers
+        def layer_path_match(path):
+            for layerdir in layers:
+                if path.startswith(os.path.join(layerdir, '')):
+                    return layerdir
+            return None
+
+        applied_appends = []
+        for layer in layers:
+            overlayed = []
+            for f in self.bbhandler.cooker.collection.overlayed.iterkeys():
+                for of in self.bbhandler.cooker.collection.overlayed[f]:
+                    if of.startswith(layer):
+                        overlayed.append(of)
+
+            logger.plain('Copying files from %s...' % layer )
+            for root, dirs, files in os.walk(layer):
+                for f1 in files:
+                    f1full = os.sep.join([root, f1])
+                    if f1full in overlayed:
+                        logger.plain('  Skipping overlayed file %s' % f1full )
+                    else:
+                        ext = os.path.splitext(f1)[1]
+                        if ext != '.bbappend':
+                            fdest = f1full[len(layer):]
+                            fdest = os.path.normpath(os.sep.join([outputdir,fdest]))
+                            bb.utils.mkdirhier(os.path.dirname(fdest))
+                            if os.path.exists(fdest):
+                                if f1 == 'layer.conf' and root.endswith('/conf'):
+                                    logger.plain('  Skipping layer config file %s' % f1full )
+                                    continue
+                                else:
+                                    logger.warn('Overwriting file %s', fdest)
+                            bb.utils.copyfile(f1full, fdest)
+                            if ext == '.bb':
+                                for append in self.bbhandler.cooker.collection.get_file_appends(f1full):
+                                    if layer_path_match(append):
+                                        logger.plain('  Applying append %s to %s' % (append, fdest))
+                                        self.apply_append(append, fdest)
+                                        applied_appends.append(append)
+
+        # Handle bbappends in the flattened layers whose target recipes live in
+        # excluded layers, so they were not applied by the copy loop above
+        for b in self.bbhandler.cooker.collection.bbappends:
+            (recipename, appendname) = b
+            if appendname not in applied_appends:
+                first_append = None
+                layer = layer_path_match(appendname)
+                if layer:
+                    if first_append:
+                        self.apply_append(appendname, first_append)
+                    else:
+                        fdest = appendname[len(layer):]
+                        fdest = os.path.normpath(os.sep.join([outputdir,fdest]))
+                        bb.utils.mkdirhier(os.path.dirname(fdest))
+                        bb.utils.copyfile(appendname, fdest)
+                        first_append = fdest
+
+        # Get the regex for the first layer in our list (which is where the conf/layer.conf file will
+        # have come from)
+        first_regex = None
+        layerdir = layers[0]
+        for layername, pattern, regex, _ in self.bbhandler.cooker.recipecache.bbfile_config_priorities:
+            if regex.match(os.path.join(layerdir, 'test')):
+                first_regex = regex
+                break
+
+        if first_regex:
+            # Find the BBFILES entries that match (which will have come from this conf/layer.conf file)
+            bbfiles = str(self.bbhandler.config_data.getVar('BBFILES', True)).split()
+            bbfiles_layer = []
+            for item in bbfiles:
+                if first_regex.match(item):
+                    newpath = os.path.join(outputdir, item[len(layerdir)+1:])
+                    bbfiles_layer.append(newpath)
+
+            if bbfiles_layer:
+                # Check that all important layer files match BBFILES
+                for root, dirs, files in os.walk(outputdir):
+                    for f1 in files:
+                        ext = os.path.splitext(f1)[1]
+                        if ext in ['.bb', '.bbappend']:
+                            f1full = os.sep.join([root, f1])
+                            entry_found = False
+                            for item in bbfiles_layer:
+                                if fnmatch.fnmatch(f1full, item):
+                                    entry_found = True
+                                    break
+                            if not entry_found:
+                                logger.warning("File %s does not match the flattened layer's BBFILES setting, you may need to edit conf/layer.conf or move the file elsewhere" % f1full)
+
+    def get_file_layer(self, filename):
+        layerdir = self.get_file_layerdir(filename)
+        if layerdir:
+            return self.get_layer_name(layerdir)
+        else:
+            return '?'
+
+    def get_file_layerdir(self, filename):
+        layer = bb.utils.get_file_layer(filename, self.bbhandler.config_data)
+        return self.bbfile_collections.get(layer, None)
+
+    def remove_layer_prefix(self, f):
+        """Remove the layer_dir prefix, e.g., f = /path/to/layer_dir/foo/blah, the
+           return value will be: layer_dir/foo/blah"""
+        f_layerdir = self.get_file_layerdir(f)
+        if not f_layerdir:
+            return f
+        prefix = os.path.join(os.path.dirname(f_layerdir), '')
+        return f[len(prefix):] if f.startswith(prefix) else f
+
+    def get_layer_name(self, layerdir):
+        return os.path.basename(layerdir.rstrip(os.sep))
+
+    def apply_append(self, appendname, recipename):
+        with open(appendname, 'r') as appendfile:
+            with open(recipename, 'a') as recipefile:
+                recipefile.write('\n')
+                recipefile.write('##### bbappended from %s #####\n' % self.get_file_layer(appendname))
+                recipefile.writelines(appendfile.readlines())
+
+    def do_show_appends(self, args):
+        """list bbappend files and recipe files they apply to
+
+Lists recipes with the bbappends that apply to them as subitems.
+"""
+        self.init_bbhandler()
+
+        logger.plain('=== Appended recipes ===')
+
+        pnlist = list(self.bbhandler.cooker_data.pkg_pn.keys())
+        pnlist.sort()
+        appends = False
+        for pn in pnlist:
+            if self.show_appends_for_pn(pn):
+                appends = True
+
+        if self.show_appends_for_skipped():
+            appends = True
+
+        if not appends:
+            logger.plain('No append files found')
+
+    def show_appends_for_pn(self, pn):
+        filenames = self.bbhandler.cooker_data.pkg_pn[pn]
+
+        best = bb.providers.findBestProvider(pn,
+                                             self.bbhandler.config_data,
+                                             self.bbhandler.cooker_data,
+                                             self.bbhandler.cooker_data.pkg_pn)
+        best_filename = os.path.basename(best[3])
+
+        return self.show_appends_output(filenames, best_filename)
+
+    def show_appends_for_skipped(self):
+        filenames = [os.path.basename(f)
+                    for f in self.bbhandler.cooker.skiplist.iterkeys()]
+        return self.show_appends_output(filenames, None, " (skipped)")
+
+    def show_appends_output(self, filenames, best_filename, name_suffix = ''):
+        appended, missing = self.get_appends_for_files(filenames)
+        if appended:
+            for basename, appends in appended:
+                logger.plain('%s%s:', basename, name_suffix)
+                for append in appends:
+                    logger.plain('  %s', append)
+
+            if best_filename:
+                if best_filename in missing:
+                    logger.warn('%s: missing append for preferred version',
+                                best_filename)
+            return True
+        else:
+            return False
+
+    def get_appends_for_files(self, filenames):
+        appended, notappended = [], []
+        for filename in filenames:
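+            # Virtual filenames ("virtual:<class>:/path/to/recipe.bb") denote BBCLASSEXTEND
+            # variants; only consider the real recipe file here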
+            _, cls = bb.cache.Cache.virtualfn2realfn(filename)
+            if cls:
+                continue
+
+            basename = os.path.basename(filename)
+            appends = self.bbhandler.cooker.collection.get_file_appends(basename)
+            if appends:
+                appended.append((basename, list(appends)))
+            else:
+                notappended.append(basename)
+        return appended, notappended
+
+    def do_show_cross_depends(self, args):
+        """Show dependencies between recipes that cross layer boundaries.
+
+Figure out the dependencies between recipes that cross layer boundaries.
+
+NOTE: .bbappend files can impact the dependencies.
+"""
+        ignore_layers = (args.ignore or '').split(',')
+
+        self.init_bbhandler()
+
+        pkg_fn = self.bbhandler.cooker_data.pkg_fn
+        bbpath = str(self.bbhandler.config_data.getVar('BBPATH', True))
+        self.require_re = re.compile(r"require\s+(.+)")
+        self.include_re = re.compile(r"include\s+(.+)")
+        self.inherit_re = re.compile(r"inherit\s+(.+)")
+
+        global_inherit = (self.bbhandler.config_data.getVar('INHERIT', True) or "").split()
+
+        # The .bb file's DEPENDS and RDEPENDS
+        for f in pkg_fn:
+            f = bb.cache.Cache.virtualfn2realfn(f)[0]
+            # Get the layername that the file is in
+            layername = self.get_file_layer(f)
+
+            # The DEPENDS
+            deps = self.bbhandler.cooker_data.deps[f]
+            for pn in deps:
+                if pn in self.bbhandler.cooker_data.pkg_pn:
+                    best = bb.providers.findBestProvider(pn,
+                            self.bbhandler.config_data,
+                            self.bbhandler.cooker_data,
+                            self.bbhandler.cooker_data.pkg_pn)
+                    self.check_cross_depends("DEPENDS", layername, f, best[3], args.filenames, ignore_layers)
+
+            # The RDEPENDS
+            all_rdeps = self.bbhandler.cooker_data.rundeps[f].values()
+            # Remove duplicate entries.
+            sorted_rdeps = {}
+            # all_rdeps is a list of lists, so we need two for loops to flatten it
+            for k1 in all_rdeps:
+                for k2 in k1:
+                    sorted_rdeps[k2] = 1
+            all_rdeps = sorted_rdeps.keys()
+            for rdep in all_rdeps:
+                all_p = bb.providers.getRuntimeProviders(self.bbhandler.cooker_data, rdep)
+                if all_p:
+                    if f in all_p:
+                        # The recipe provides this one itself, ignore
+                        continue
+                    best = bb.providers.filterProvidersRunTime(all_p, rdep,
+                                    self.bbhandler.config_data,
+                                    self.bbhandler.cooker_data)[0][0]
+                    self.check_cross_depends("RDEPENDS", layername, f, best, args.filenames, ignore_layers)
+
+            # The RRECOMMENDS
+            all_rrecs = self.bbhandler.cooker_data.runrecs[f].values()
+            # Remove duplicate entries.
+            sorted_rrecs = {}
+            # all_rrecs is a list of lists, so we need two for loops to flatten it
+            for k1 in all_rrecs:
+                for k2 in k1:
+                    sorted_rrecs[k2] = 1
+            all_rrecs = sorted_rrecs.keys()
+            for rrec in all_rrecs:
+                all_p = bb.providers.getRuntimeProviders(self.bbhandler.cooker_data, rrec)
+                if all_p:
+                    if f in all_p:
+                        # The recipe provides this one itself, ignore
+                        continue
+                    best = bb.providers.filterProvidersRunTime(all_p, rrec,
+                                    self.bbhandler.config_data,
+                                    self.bbhandler.cooker_data)[0][0]
+                    self.check_cross_depends("RRECOMMENDS", layername, f, best, args.filenames, ignore_layers)
+
+            # The inherit class
+            cls_re = re.compile('classes/')
+            if f in self.bbhandler.cooker_data.inherits:
+                inherits = self.bbhandler.cooker_data.inherits[f]
+                for cls in inherits:
+                    # Entries in inherits come in two forms, 'classes/cls' and
+                    # '/path/to/classes/cls'; ignore the bare 'classes/cls' form.
+                    if not cls_re.match(cls):
+                        classname = os.path.splitext(os.path.basename(cls))[0]
+                        if classname in global_inherit:
+                            continue
+                        inherit_layername = self.get_file_layer(cls)
+                        if inherit_layername != layername and not inherit_layername in ignore_layers:
+                            if not args.filenames:
+                                f_short = self.remove_layer_prefix(f)
+                                cls = self.remove_layer_prefix(cls)
+                            else:
+                                f_short = f
+                            logger.plain("%s inherits %s" % (f_short, cls))
+
+            # The 'require/include xxx' in the bb file
+            pv_re = re.compile(r"\${PV}")
+            with open(f, 'r') as fnfile:
+                line = fnfile.readline()
+                while line:
+                    m, keyword = self.match_require_include(line)
+                    # Found the 'require/include xxxx'
+                    if m:
+                        needed_file = m.group(1)
+                        # Replace the ${PV} with the real PV
+                        if pv_re.search(needed_file) and f in self.bbhandler.cooker_data.pkg_pepvpr:
+                            pv = self.bbhandler.cooker_data.pkg_pepvpr[f][1]
+                            needed_file = re.sub(r"\${PV}", pv, needed_file)
+                        self.print_cross_files(bbpath, keyword, layername, f, needed_file, args.filenames, ignore_layers)
+                    line = fnfile.readline()
+
+        # The "require/include xxx" in conf/machine/*.conf, .inc and .bbclass
+        conf_re = re.compile(r".*/conf/machine/[^/]*\.conf$")
+        inc_re = re.compile(r".*\.inc$")
+        # The "inherit xxx" in .bbclass
+        bbclass_re = re.compile(r".*\.bbclass$")
+        for layerdir in self.bblayers:
+            layername = self.get_layer_name(layerdir)
+            for dirpath, dirnames, filenames in os.walk(layerdir):
+                for name in filenames:
+                    f = os.path.join(dirpath, name)
+                    s = conf_re.match(f) or inc_re.match(f) or bbclass_re.match(f)
+                    if s:
+                        with open(f, 'r') as ffile:
+                            line = ffile.readline()
+                            while line:
+                                m, keyword = self.match_require_include(line)
+                                # Only .bbclass files contain "inherit xxx" lines here.
+                                bbclass = ""
+                                if not m and f.endswith(".bbclass"):
+                                    m, keyword = self.match_inherit(line)
+                                    bbclass = ".bbclass"
+                                # Find a 'require/include xxxx'
+                                if m:
+                                    self.print_cross_files(bbpath, keyword, layername, f, m.group(1) + bbclass, args.filenames, ignore_layers)
+                                line = ffile.readline()
+
+    def print_cross_files(self, bbpath, keyword, layername, f, needed_filename, show_filenames, ignore_layers):
+        """Print the depends that crosses a layer boundary"""
+        needed_file = bb.utils.which(bbpath, needed_filename)
+        if needed_file:
+            # Which layer is this file from
+            needed_layername = self.get_file_layer(needed_file)
+            if needed_layername != layername and not needed_layername in ignore_layers:
+                if not show_filenames:
+                    f = self.remove_layer_prefix(f)
+                    needed_file = self.remove_layer_prefix(needed_file)
+                logger.plain("%s %s %s" %(f, keyword, needed_file))
+
+    def match_inherit(self, line):
+        """Match the inherit xxx line"""
+        return (self.inherit_re.match(line), "inherits")
+
+    def match_require_include(self, line):
+        """Match the require/include xxx line"""
+        m = self.require_re.match(line)
+        keyword = "requires"
+        if not m:
+            m = self.include_re.match(line)
+            keyword = "includes"
+        return (m, keyword)
+
+    def check_cross_depends(self, keyword, layername, f, needed_file, show_filenames, ignore_layers):
+        """Print the DEPENDS/RDEPENDS file that crosses a layer boundary"""
+        best_realfn = bb.cache.Cache.virtualfn2realfn(needed_file)[0]
+        needed_layername = self.get_file_layer(best_realfn)
+        if needed_layername != layername and not needed_layername in ignore_layers:
+            if not show_filenames:
+                f = self.remove_layer_prefix(f)
+                best_realfn = self.remove_layer_prefix(best_realfn)
+
+            logger.plain("%s %s %s" % (f, keyword, best_realfn))
+
+
+def main():
+
+    cmds = Commands()
+
+    def add_command(cmdname, function, *args, **kwargs):
+        # Convert docstring for function to help (one-liner shown in main --help) and description (shown in subcommand --help)
+        docsplit = function.__doc__.splitlines()
+        help = docsplit[0]
+        if len(docsplit) > 1:
+            desc = '\n'.join(docsplit[1:])
+        else:
+            desc = help
+        subparser = subparsers.add_parser(cmdname, *args, help=help, description=desc, formatter_class=argparse.RawTextHelpFormatter, **kwargs)
+        subparser.set_defaults(func=function)
+        return subparser
+
+    parser = argparse.ArgumentParser(description="BitBake layers utility",
+                                     epilog="Use %(prog)s <subcommand> --help to get help on a specific command")
+    parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
+    parser.add_argument('-q', '--quiet', help='Print only errors', action='store_true')
+    subparsers = parser.add_subparsers(title='subcommands', metavar='<subcommand>')
+
+    parser_show_layers = add_command('show-layers', cmds.do_show_layers)
+
+    parser_add_layer = add_command('add-layer', cmds.do_add_layer)
+    parser_add_layer.add_argument('layerdir', help='Layer directory to add')
+
+    parser_remove_layer = add_command('remove-layer', cmds.do_remove_layer)
+    parser_remove_layer.add_argument('layerdir', help='Layer directory to remove (wildcards allowed, enclose in quotes to avoid shell expansion)')
+    parser_remove_layer.set_defaults(func=cmds.do_remove_layer)
+
+    parser_show_overlayed = add_command('show-overlayed', cmds.do_show_overlayed)
+    parser_show_overlayed.add_argument('-f', '--filenames', help='instead of the default formatting, list filenames of higher priority recipes with the ones they overlay indented underneath', action='store_true')
+    parser_show_overlayed.add_argument('-s', '--same-version', help='only list overlayed recipes where the version is the same', action='store_true')
+
+    parser_show_recipes = add_command('show-recipes', cmds.do_show_recipes)
+    parser_show_recipes.add_argument('-f', '--filenames', help='instead of the default formatting, list filenames of higher priority recipes with the ones they overlay indented underneath', action='store_true')
+    parser_show_recipes.add_argument('-m', '--multiple', help='only list where multiple recipes (in the same layer or different layers) exist for the same recipe name', action='store_true')
+    parser_show_recipes.add_argument('-i', '--inherits', help='only list recipes that inherit the named class', metavar='CLASS', default='')
+    parser_show_recipes.add_argument('pnspec', nargs='?', help='optional recipe name specification (wildcards allowed, enclose in quotes to avoid shell expansion)')
+
+    parser_show_appends = add_command('show-appends', cmds.do_show_appends)
+
+    parser_flatten = add_command('flatten', cmds.do_flatten)
+    parser_flatten.add_argument('layer', nargs='*', help='Optional layer(s) to flatten (otherwise all are flattened)')
+    parser_flatten.add_argument('outputdir', help='Output directory')
+
+    parser_show_cross_depends = add_command('show-cross-depends', cmds.do_show_cross_depends)
+    parser_show_cross_depends.add_argument('-f', '--filenames', help='show full file path', action='store_true')
+    parser_show_cross_depends.add_argument('-i', '--ignore', help='ignore dependencies on items in the specified layer(s) (split multiple layer names with commas, no spaces)', metavar='LAYERNAME')
+
+    parser_layerindex_fetch = add_command('layerindex-fetch', cmds.do_layerindex_fetch)
+    parser_layerindex_fetch.add_argument('-n', '--show-only', help='show dependencies and do nothing else', action='store_true')
+    parser_layerindex_fetch.add_argument('-b', '--branch', help='branch name to fetch (default %(default)s)', default='master')
+    parser_layerindex_fetch.add_argument('-i', '--ignore', help='assume the specified layers do not need to be fetched/added (separate multiple layers with commas, no spaces)', metavar='LAYER')
+    parser_layerindex_fetch.add_argument('layername', nargs='+', help='layer to fetch')
+
+    parser_layerindex_show_depends = add_command('layerindex-show-depends', cmds.do_layerindex_show_depends)
+    parser_layerindex_show_depends.add_argument('-b', '--branch', help='branch name to fetch (default %(default)s)', default='master')
+    parser_layerindex_show_depends.add_argument('layername', nargs='+', help='layer to query')
+
+    args = parser.parse_args()
+
+    if args.debug:
+        logger.setLevel(logging.DEBUG)
+    elif args.quiet:
+        logger.setLevel(logging.ERROR)
+
+    try:
+        ret = args.func(args)
+    except UserError as err:
+        logger.error(str(err))
+        ret = 1
+
+    return ret
+
+
+if __name__ == "__main__":
+    try:
+        ret = main()
+    except Exception:
+        ret = 1
+        import traceback
+        traceback.print_exc(5)
+    sys.exit(ret)
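
The add_command() helper above turns each handler's docstring into argparse help text: the first docstring line becomes the one-liner shown in the top-level --help, and the remaining lines become the subcommand's long description. A minimal, self-contained sketch of the same pattern (the subcommand and argument names here are illustrative, not part of bitbake):

    import argparse

    def do_example(args):
        """show an example layer directory

    Prints the layer directory passed on the command line.
    """
        print("layerdir = %s" % args.layerdir)

    parser = argparse.ArgumentParser(description="demo")
    subparsers = parser.add_subparsers(title='subcommands', metavar='<subcommand>')

    docsplit = do_example.__doc__.splitlines()
    subparser = subparsers.add_parser('example', help=docsplit[0],
                                      description='\n'.join(docsplit[1:]) or docsplit[0],
                                      formatter_class=argparse.RawTextHelpFormatter)
    subparser.add_argument('layerdir')
    subparser.set_defaults(func=do_example)

    args = parser.parse_args(['example', '/tmp/meta-example'])
    args.func(args)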

+ 55 - 0
bitbake/bin/bitbake-prserv

@@ -0,0 +1,55 @@
+#!/usr/bin/env python
+import os
+import sys,logging
+import optparse
+
+sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)),'lib'))
+
+import prserv
+import prserv.serv
+
+__version__="1.0.0"
+
+PRHOST_DEFAULT='0.0.0.0'
+PRPORT_DEFAULT=8585
+
+def main():
+    parser = optparse.OptionParser(
+        version="Bitbake PR Service Core version %s, %%prog version %s" % (prserv.__version__, __version__),
+        usage = "%prog < --start | --stop > [options]")
+
+    parser.add_option("-f", "--file", help="database filename(default: prserv.sqlite3)", action="store",
+                      dest="dbfile", type="string", default="prserv.sqlite3")
+    parser.add_option("-l", "--log", help="log filename(default: prserv.log)", action="store",
+                      dest="logfile", type="string", default="prserv.log")
+    parser.add_option("--loglevel", help="logging level, i.e. CRITICAL, ERROR, WARNING, INFO, DEBUG",
+                      action = "store", type="string", dest="loglevel", default = "INFO")
+    parser.add_option("--start", help="start daemon",
+                      action="store_true", dest="start")
+    parser.add_option("--stop", help="stop daemon",
+                      action="store_true", dest="stop")
+    parser.add_option("--host", help="ip address to bind", action="store",
+                      dest="host", type="string", default=PRHOST_DEFAULT)
+    parser.add_option("--port", help="port number(default: 8585)", action="store",
+                      dest="port", type="int", default=PRPORT_DEFAULT)
+
+    options, args = parser.parse_args(sys.argv)
+    prserv.init_logger(os.path.abspath(options.logfile),options.loglevel)
+
+    if options.start:
+        ret=prserv.serv.start_daemon(options.dbfile, options.host, options.port,os.path.abspath(options.logfile))
+    elif options.stop:
+        ret=prserv.serv.stop_daemon(options.host, options.port)
+    else:
+        ret=parser.print_help()
+    return ret
+
+if __name__ == "__main__":
+    try:
+        ret = main()
+    except Exception:
+        ret = 1
+        import traceback
+        traceback.print_exc(5)
+    sys.exit(ret)
+

+ 55 - 0
bitbake/bin/bitbake-selftest

@@ -0,0 +1,55 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2012 Richard Purdie
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import sys, logging
+sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), 'lib'))
+
+import unittest
+try:
+    import bb
+except RuntimeError as exc:
+    sys.exit(str(exc))
+
+def usage():
+    print('usage: [BB_SKIP_NETTESTS=yes] %s [-v] [testname1 [testname2]...]' % os.path.basename(sys.argv[0]))
+
+verbosity = 1
+
+tests = sys.argv[1:]
+if '-v' in sys.argv:
+    tests.remove('-v')
+    verbosity = 2
+
+if tests:
+    if '--help' in sys.argv[1:]:
+        usage()
+        sys.exit(0)
+else:
+    tests = ["bb.tests.codeparser",
+             "bb.tests.cow",
+             "bb.tests.data",
+             "bb.tests.fetch",
+             "bb.tests.parse",
+             "bb.tests.utils"]
+
+for t in tests:
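+    # Trim any individual test name (e.g. bb.tests.data.SomeCase.test_x) down to its
+    # module so it can be imported; unittest.main() below still receives the full name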
+    t = '.'.join(t.split('.')[:3])
+    __import__(t)
+
+unittest.main(argv=["bitbake-selftest"] + tests, verbosity=verbosity)
+

+ 447 - 0
bitbake/bin/bitbake-worker

@@ -0,0 +1,447 @@
+#!/usr/bin/env python
+
+import os
+import sys
+import warnings
+sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
+from bb import fetch2
+import logging
+import bb
+import select
+import errno
+import signal
+from multiprocessing import Lock
+
+# Users shouldn't be running this code directly
+if len(sys.argv) != 2 or not sys.argv[1].startswith("decafbad"):
+    print("bitbake-worker is meant for internal execution by bitbake itself, please don't use it standalone.")
+    sys.exit(1)
+
+profiling = False
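+# argv[1] beginning with "decafbadbad" rather than plain "decafbad" tells the worker
+# to run each task under the profiler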
+if sys.argv[1].startswith("decafbadbad"):
+    profiling = True
+    try:
+        import cProfile as profile
+    except:
+        import profile
+
+# Unbuffer stdout to avoid log truncation in the event
+# of an unclean exit, as well as to provide timely
+# updates to log files for use with tail
+try:
+    if sys.stdout.name == '<stdout>':
+        sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
+except:
+    pass
+
+logger = logging.getLogger("BitBake")
+
+try:
+    import cPickle as pickle
+except ImportError:
+    import pickle
+    bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.")
+
+
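+# The worker talks to the parent bitbake process over its own stdout; events are
+# queued and written to this fd by worker_fire()/worker_flush() below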
+worker_pipe = sys.stdout.fileno()
+bb.utils.nonblockingfd(worker_pipe)
+# Need to guard against multiprocessing being used in child processes
+# and multiple processes trying to write to the parent at the same time
+worker_pipe_lock = None
+
+handler = bb.event.LogHandler()
+logger.addHandler(handler)
+
+if 0:
+    # Code to write out a log file of all events passing through the worker
+    logfilename = "/tmp/workerlogfile"
+    format_str = "%(levelname)s: %(message)s"
+    conlogformat = bb.msg.BBLogFormatter(format_str)
+    consolelog = logging.FileHandler(logfilename)
+    bb.msg.addDefaultlogFilter(consolelog)
+    consolelog.setFormatter(conlogformat)
+    logger.addHandler(consolelog)
+
+worker_queue = ""
+
+def worker_fire(event, d):
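+    # Pickle the event and frame it with <event>...</event> markers so the parent
+    # can split the byte stream back into individual events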
+    data = "<event>" + pickle.dumps(event) + "</event>"
+    worker_fire_prepickled(data)
+
+def worker_fire_prepickled(event):
+    global worker_queue
+
+    worker_queue = worker_queue + event
+    worker_flush()
+
+def worker_flush():
+    global worker_queue, worker_pipe
+
+    if not worker_queue:
+        return
+
+    try:
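+        # The pipe is non-blocking (see nonblockingfd above), so os.write() may accept
+        # only part of the data; keep the unwritten remainder queued for the next flush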
+        written = os.write(worker_pipe, worker_queue)
+        worker_queue = worker_queue[written:]
+    except (IOError, OSError) as e:
+        if e.errno != errno.EAGAIN and e.errno != errno.EPIPE:
+            raise
+
+def worker_child_fire(event, d):
+    global worker_pipe
+    global worker_pipe_lock
+
+    data = "<event>" + pickle.dumps(event) + "</event>"
+    try:
+        worker_pipe_lock.acquire()
+        worker_pipe.write(data)
+        worker_pipe_lock.release()
+    except IOError:
+        sigterm_handler(None, None)
+        raise
+
+bb.event.worker_fire = worker_fire
+
+lf = None
+#lf = open("/tmp/workercommandlog", "w+")
+def workerlog_write(msg):
+    if lf:
+        lf.write(msg)
+        lf.flush()
+
+def sigterm_handler(signum, frame):
+    signal.signal(signal.SIGTERM, signal.SIG_DFL)
+    os.killpg(0, signal.SIGTERM)
+    sys.exit()
+
+def fork_off_task(cfg, data, workerdata, fn, task, taskname, appends, taskdepdata, quieterrors=False):
+    # We need to setup the environment BEFORE the fork, since
+    # a fork() or exec*() activates PSEUDO...
+
+    envbackup = {}
+    fakeenv = {}
+    umask = None
+
+    taskdep = workerdata["taskdeps"][fn]
+    if 'umask' in taskdep and taskname in taskdep['umask']:
+        # umask might come in as a number or text string.
+        try:
+            umask = int(taskdep['umask'][taskname], 8)
+        except TypeError:
+            umask = taskdep['umask'][taskname]
+
+    # We can't use the fakeroot environment in a dry run as it possibly hasn't been built
+    if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not cfg.dry_run:
+        envvars = (workerdata["fakerootenv"][fn] or "").split()
+        for key, value in (var.split('=') for var in envvars):
+            envbackup[key] = os.environ.get(key)
+            os.environ[key] = value
+            fakeenv[key] = value
+
+        fakedirs = (workerdata["fakerootdirs"][fn] or "").split()
+        for p in fakedirs:
+            bb.utils.mkdirhier(p)
+        logger.debug(2, 'Running %s:%s under fakeroot, fakedirs: %s' %
+                        (fn, taskname, ', '.join(fakedirs)))
+    else:
+        envvars = (workerdata["fakerootnoenv"][fn] or "").split()
+        for key, value in (var.split('=') for var in envvars):
+            envbackup[key] = os.environ.get(key)
+            os.environ[key] = value
+            fakeenv[key] = value
+
+    sys.stdout.flush()
+    sys.stderr.flush()
+
+    try:
+        pipein, pipeout = os.pipe()
+        pipein = os.fdopen(pipein, 'rb', 4096)
+        pipeout = os.fdopen(pipeout, 'wb', 0)
+        pid = os.fork()
+    except OSError as e:
+        bb.msg.fatal("RunQueue", "fork failed: %d (%s)" % (e.errno, e.strerror))
+
+    if pid == 0:
+        def child():
+            global worker_pipe
+            global worker_pipe_lock
+            pipein.close()
+
+            signal.signal(signal.SIGTERM, sigterm_handler)
+            # Let SIGHUP exit as SIGTERM
+            signal.signal(signal.SIGHUP, sigterm_handler)
+            bb.utils.signal_on_parent_exit("SIGTERM")
+
+            # Save out the PID so that events fired from the child can
+            # include it
+            bb.event.worker_pid = os.getpid()
+            bb.event.worker_fire = worker_child_fire
+            worker_pipe = pipeout
+            worker_pipe_lock = Lock()
+
+            # Make the child the process group leader and ensure no
+            # child process will be controlled by the current terminal
+            # This ensures signals sent to the controlling terminal like Ctrl+C
+            # don't stop the child processes.
+            os.setsid()
+            # No stdin
+            newsi = os.open(os.devnull, os.O_RDWR)
+            os.dup2(newsi, sys.stdin.fileno())
+
+            if umask:
+                os.umask(umask)
+
+            data.setVar("BB_WORKERCONTEXT", "1")
+            data.setVar("BB_TASKDEPDATA", taskdepdata)
+            data.setVar("BUILDNAME", workerdata["buildname"])
+            data.setVar("DATE", workerdata["date"])
+            data.setVar("TIME", workerdata["time"])
+            bb.parse.siggen.set_taskdata(workerdata["sigdata"])
+            ret = 0
+            try:
+                the_data = bb.cache.Cache.loadDataFull(fn, appends, data)
+                the_data.setVar('BB_TASKHASH', workerdata["runq_hash"][task])
+
+                bb.utils.set_process_name("%s:%s" % (the_data.getVar("PN", True), taskname.replace("do_", "")))
+
+                # exported_vars() returns a generator which *cannot* be passed to os.environ.update() 
+                # successfully. We also need to unset anything from the environment which shouldn't be there 
+                exports = bb.data.exported_vars(the_data)
+                bb.utils.empty_environment()
+                for e, v in exports:
+                    os.environ[e] = v
+                for e in fakeenv:
+                    os.environ[e] = fakeenv[e]
+                    the_data.setVar(e, fakeenv[e])
+                    the_data.setVarFlag(e, 'export', "1")
+
+                if quieterrors:
+                    the_data.setVarFlag(taskname, "quieterrors", "1")
+
+            except Exception as exc:
+                if not quieterrors:
+                    logger.critical(str(exc))
+                os._exit(1)
+            try:
+                if cfg.dry_run:
+                    return 0
+                return bb.build.exec_task(fn, taskname, the_data, cfg.profile)
+            except:
+                os._exit(1)
+        if not profiling:
+            os._exit(child())
+        else:
+            profname = "profile-%s.log" % (fn.replace("/", "-") + "-" + taskname)
+            prof = profile.Profile()
+            try: 
+                ret = profile.Profile.runcall(prof, child)
+            finally:
+                prof.dump_stats(profname)
+                bb.utils.process_profilelog(profname)
+                os._exit(ret)
+    else:
+        for key, value in envbackup.iteritems():
+            if value is None:
+                del os.environ[key]
+            else:
+                os.environ[key] = value
+
+    return pid, pipein, pipeout
+
+class runQueueWorkerPipe():
+    """
+    Abstraction for a pipe between a worker thread and the worker server
+    """
+    def __init__(self, pipein, pipeout):
+        self.input = pipein
+        if pipeout:
+            pipeout.close()
+        bb.utils.nonblockingfd(self.input)
+        self.queue = ""
+
+    def read(self):
+        start = len(self.queue)
+        try:
+            self.queue = self.queue + self.input.read(102400)
+        except (OSError, IOError) as e:
+            if e.errno != errno.EAGAIN:
+                raise
+
+        end = len(self.queue)
+        index = self.queue.find("</event>")
+        while index != -1:
+            worker_fire_prepickled(self.queue[:index+8])
+            self.queue = self.queue[index+8:]
+            index = self.queue.find("</event>")
+        return (end > start)
+
+    def close(self):
+        while self.read():
+            continue
+        if len(self.queue) > 0:
+            print("Warning, worker child left partial message: %s" % self.queue)
+        self.input.close()
+
+normalexit = False
+
+class BitbakeWorker(object):
+    def __init__(self, din):
+        self.input = din
+        bb.utils.nonblockingfd(self.input)
+        self.queue = ""
+        self.cookercfg = None
+        self.databuilder = None
+        self.data = None
+        self.build_pids = {}
+        self.build_pipes = {}
+    
+        signal.signal(signal.SIGTERM, self.sigterm_exception)
+        # Let SIGHUP exit as SIGTERM
+        signal.signal(signal.SIGHUP, self.sigterm_exception)
+        if "beef" in sys.argv[1]:
+            bb.utils.set_process_name("Worker (Fakeroot)")
+        else:
+            bb.utils.set_process_name("Worker")
+
+    def sigterm_exception(self, signum, stackframe):
+        if signum == signal.SIGTERM:
+            bb.warn("Worker received SIGTERM, shutting down...")
+        elif signum == signal.SIGHUP:
+            bb.warn("Worker received SIGHUP, shutting down...")
+        self.handle_finishnow(None)
+        signal.signal(signal.SIGTERM, signal.SIG_DFL)
+        os.kill(os.getpid(), signal.SIGTERM)
+
+    def serve(self):        
+        while True:
+            (ready, _, _) = select.select([self.input] + [i.input for i in self.build_pipes.values()], [] , [], 1)
+            if self.input in ready:
+                try:
+                    r = self.input.read()
+                    if len(r) == 0:
+                        # EOF on pipe, server must have terminated
+                        self.sigterm_exception(signal.SIGTERM, None)
+                    self.queue = self.queue + r
+                except (OSError, IOError):
+                    pass
+            if len(self.queue):
+                self.handle_item("cookerconfig", self.handle_cookercfg)
+                self.handle_item("workerdata", self.handle_workerdata)
+                self.handle_item("runtask", self.handle_runtask)
+                self.handle_item("finishnow", self.handle_finishnow)
+                self.handle_item("ping", self.handle_ping)
+                self.handle_item("quit", self.handle_quit)
+
+            for pipe in self.build_pipes:
+                self.build_pipes[pipe].read()
+            if len(self.build_pids):
+                self.process_waitpid()
+            worker_flush()
+
+
+    def handle_item(self, item, func):
+        if self.queue.startswith("<" + item + ">"):
+            index = self.queue.find("</" + item + ">")
+            while index != -1:
+                func(self.queue[(len(item) + 2):index])
+                self.queue = self.queue[(index + len(item) + 3):]
+                index = self.queue.find("</" + item + ">")
+
+    def handle_cookercfg(self, data):
+        self.cookercfg = pickle.loads(data)
+        self.databuilder = bb.cookerdata.CookerDataBuilder(self.cookercfg, worker=True)
+        self.databuilder.parseBaseConfiguration()
+        self.data = self.databuilder.data
+
+    def handle_workerdata(self, data):
+        self.workerdata = pickle.loads(data)
+        bb.msg.loggerDefaultDebugLevel = self.workerdata["logdefaultdebug"]
+        bb.msg.loggerDefaultVerbose = self.workerdata["logdefaultverbose"]
+        bb.msg.loggerVerboseLogs = self.workerdata["logdefaultverboselogs"]
+        bb.msg.loggerDefaultDomains = self.workerdata["logdefaultdomain"]
+        self.data.setVar("PRSERV_HOST", self.workerdata["prhost"])
+
+    def handle_ping(self, _):
+        workerlog_write("Handling ping\n")
+
+        logger.warn("Pong from bitbake-worker!")
+
+    def handle_quit(self, data):
+        workerlog_write("Handling quit\n")
+
+        global normalexit
+        normalexit = True
+        sys.exit(0)
+
+    def handle_runtask(self, data):
+        fn, task, taskname, quieterrors, appends, taskdepdata = pickle.loads(data)
+        workerlog_write("Handling runtask %s %s %s\n" % (task, fn, taskname))
+
+        pid, pipein, pipeout = fork_off_task(self.cookercfg, self.data, self.workerdata, fn, task, taskname, appends, taskdepdata, quieterrors)
+
+        self.build_pids[pid] = task
+        self.build_pipes[pid] = runQueueWorkerPipe(pipein, pipeout)
+
+    def process_waitpid(self):
+        """
+        Return None if there are no processes awaiting result collection; otherwise
+        collect the process exit codes and close the information pipe.
+        """
+        try:
+            pid, status = os.waitpid(-1, os.WNOHANG)
+            if pid == 0 or os.WIFSTOPPED(status):
+                return None
+        except OSError:
+            return None
+
+        workerlog_write("Exit code of %s for pid %s\n" % (status, pid))
+
+        if os.WIFEXITED(status):
+            status = os.WEXITSTATUS(status)
+        elif os.WIFSIGNALED(status):
+            # Per shell conventions for $?, when a process exits due to
+            # a signal, we return an exit code of 128 + SIGNUM
+            status = 128 + os.WTERMSIG(status)
+
+        task = self.build_pids[pid]
+        del self.build_pids[pid]
+
+        self.build_pipes[pid].close()
+        del self.build_pipes[pid]
+
+        worker_fire_prepickled("<exitcode>" + pickle.dumps((task, status)) + "</exitcode>")
+
+    def handle_finishnow(self, _):
+        if self.build_pids:
+            logger.info("Sending SIGTERM to remaining %s tasks", len(self.build_pids))
+            for k, v in self.build_pids.iteritems():
+                try:
+                    os.kill(-k, signal.SIGTERM)
+                    os.waitpid(-1, 0)
+                except:
+                    pass
+        for pipe in self.build_pipes:
+            self.build_pipes[pipe].read()
+
+try:
+    worker = BitbakeWorker(sys.stdin)
+    if not profiling:
+        worker.serve()
+    else:
+        profname = "profile-worker.log"
+        prof = profile.Profile()
+        try:
+            profile.Profile.runcall(prof, worker.serve)
+        finally:
+            prof.dump_stats(profname)
+            bb.utils.process_profilelog(profname)
+except BaseException as e:
+    if not normalexit:
+        import traceback
+        sys.stderr.write(traceback.format_exc())
+        sys.stderr.write(str(e))
+while len(worker_queue):
+    worker_flush()
+workerlog_write("exitting")
+sys.exit(0)
+
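A note on the pipe protocol above: worker_fire() and worker_child_fire() frame each pickled event in <event>...</event> markers before writing it to a non-blocking pipe, and runQueueWorkerPipe.read() / BitbakeWorker.handle_item() peel complete frames back off a string buffer; process_waitpid() reuses the same scheme with an <exitcode> tag. A minimal standalone sketch of that framing (the frame/extract helpers are illustrative, not bitbake API, and follow the Python 2 string handling used above):

    import pickle

    def frame(payload, tag="event"):
        # Wrap a pickled object in start/end markers, as worker_fire() does,
        # so partial writes on the non-blocking pipe can be reassembled.
        return "<" + tag + ">" + pickle.dumps(payload) + "</" + tag + ">"

    def extract(buf, tag="event"):
        # Mirror of runQueueWorkerPipe.read(): peel complete frames off the
        # front of the buffer and keep any trailing partial frame for later.
        start, end = "<" + tag + ">", "</" + tag + ">"
        items = []
        index = buf.find(end)
        while index != -1:
            items.append(pickle.loads(buf[len(start):index]))
            buf = buf[index + len(end):]
            index = buf.find(end)
        return items, buf

    stream = frame(("do_compile", 0)) + frame(("do_install", 0))[:10]
    events, leftover = extract(stream)
    print events         # [('do_compile', 0)] - only the complete frame decodes
    print len(leftover)  # 10 - the truncated frame stays buffered until more data arrives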

+ 531 - 0
bitbake/bin/bitdoc

@@ -0,0 +1,531 @@
+#!/usr/bin/env python
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Copyright (C) 2005 Holger Hans Peter Freyther
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import optparse, os, sys
+
+# bitbake
+sys.path.append(os.path.join(os.path.dirname(os.path.dirname(__file__)), 'lib'))
+import bb
+import bb.parse
+from   string import split, join
+
+__version__ = "0.0.2"
+
+class HTMLFormatter:
+    """
+    Simple class to help generate some sort of HTML files. It is a
+    far more limited solution than docbook, gtkdoc or doxygen, but it
+    should work for now.
+    We have a global introduction page (index.html), one page listing
+    all keys (alphabetically sorted), one listing all groups, and one
+    page per key with links to its relations and groups.
+
+        index.html
+        all_keys.html
+        all_groups.html
+        groupNAME.html
+        keyNAME.html
+    """
+
+    def replace(self, text, *pairs):
+        """
+        From pydoc... almost identical at least
+        """
+        while pairs:
+            (a, b) = pairs[0]
+            text = join(split(text, a), b)
+            pairs = pairs[1:]
+        return text
+    def escape(self, text):
+        """
+        Escape a string so that it is valid HTML
+        """
+        return self.replace(text, 
+                            ('&', '&amp;'), 
+                            ('<', '&lt;' ),
+                            ('>', '&gt;' ) )
+    def createNavigator(self):
+        """
+        Create the navigator
+        """
+        return """<table class="navigation" width="100%" summary="Navigation header" cellpadding="2" cellspacing="2">
+<tr valign="middle">
+<td><a accesskey="g" href="index.html">Home</a></td>
+<td><a accesskey="n" href="all_groups.html">Groups</a></td>
+<td><a accesskey="u" href="all_keys.html">Keys</a></td>
+</tr></table>
+"""
+
+    def relatedKeys(self, item):
+        """
+        Create HTML to link to foreign keys
+        """
+
+        if len(item.related()) == 0:
+            return ""
+
+        txt = "<p><b>See also:</b><br>"
+        txts = []
+        for it in item.related():
+            txts.append("""<a href="key%(it)s.html">%(it)s</a>""" % vars() )
+
+        return txt + ",".join(txts)
+
+    def groups(self, item):
+        """
+        Create HTML to link to related groups
+        """
+
+        if len(item.groups()) == 0:
+            return ""
+
+
+        txt = "<p><b>See also:</b><br>"
+        txts = []
+        for group in item.groups():
+            txts.append( """<a href="group%s.html">%s</a> """ % (group, group) )
+
+        return txt + ",".join(txts)
+
+
+    def createKeySite(self, item):
+        """
+        Create a site for a key. It contains the header/navigator, a heading,
+        the description, links to related keys and to the groups.
+        """
+
+        return """<!doctype html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
+<html><head><title>Key %s</title></head>
+<link rel="stylesheet" href="style.css" type="text/css">
+<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
+%s
+<h2><span class="refentrytitle">%s</span></h2>
+
+<div class="refsynopsisdiv">
+<h2>Synopsis</h2>
+<p>
+%s
+</p>
+</div>
+
+<div class="refsynopsisdiv">
+<h2>Related Keys</h2>
+<p>
+%s
+</p>
+</div>
+
+<div class="refsynopsisdiv">
+<h2>Groups</h2>
+<p>
+%s
+</p>
+</div>
+
+
+</body>
+"""     % (item.name(), self.createNavigator(), item.name(), 
+           self.escape(item.description()), self.relatedKeys(item), self.groups(item))
+
+    def createGroupsSite(self, doc):
+        """
+        Create the Group Overview site
+        """
+
+        groups = ""
+        sorted_groups = sorted(doc.groups())
+        for group in sorted_groups:
+            groups += """<a href="group%s.html">%s</a><br>""" % (group, group)
+
+        return """<!doctype html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
+<html><head><title>Group overview</title></head>
+<link rel="stylesheet" href="style.css" type="text/css">
+<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
+%s
+<h2>Available Groups</h2>
+%s
+</body>
+""" % (self.createNavigator(), groups)
+
+    def createIndex(self):
+        """
+        Create the index file
+        """
+
+        return """<!doctype html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
+<html><head><title>Bitbake Documentation</title></head>
+<link rel="stylesheet" href="style.css" type="text/css">
+<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
+%s
+<h2>Documentation Entrance</h2>
+<a href="all_groups.html">All available groups</a><br>
+<a href="all_keys.html">All available keys</a><br>
+</body>
+""" % self.createNavigator()
+
+    def createKeysSite(self, doc):
+        """
+        Create an overview of all available keys
+        """
+        keys = ""
+        sorted_keys = sorted(doc.doc_keys())
+        for key in sorted_keys:
+            keys += """<a href="key%s.html">%s</a><br>""" % (key, key)
+
+        return """<!doctype html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
+<html><head><title>Key overview</title></head>
+<link rel="stylesheet" href="style.css" type="text/css">
+<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
+%s
+<h2>Available Keys</h2>
+%s
+</body>
+""" % (self.createNavigator(), keys)
+
+    def createGroupSite(self, gr, items, _description = None):
+        """
+        Create a site for a group:
+        Group the name of the group, items contain the name of the keys
+        inside this group
+        """
+        groups = ""
+        description = ""
+
+        # create a section with the group descriptions
+        if _description:
+            description  += "<h2 Description of Grozp %s</h2>" % gr
+            description  += _description
+
+        items.sort(lambda x, y:cmp(x.name(), y.name()))
+        for group in items:
+            groups += """<a href="key%s.html">%s</a><br>""" % (group.name(), group.name())
+
+        return """<!doctype html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
+<html><head><title>Group %s</title></head>
+<link rel="stylesheet" href="style.css" type="text/css">
+<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
+%s
+%s
+<div class="refsynopsisdiv">
+<h2>Keys in Group %s</h2>
+<pre class="synopsis">
+%s
+</pre>
+</div>
+</body>
+""" % (gr, self.createNavigator(), description, gr, groups)
+
+
+
+    def createCSS(self):
+        """
+        Create the CSS file
+        """
+        return """.synopsis, .classsynopsis
+{
+  background: #eeeeee;
+  border: solid 1px #aaaaaa;
+  padding: 0.5em;
+}
+.programlisting
+{
+  background: #eeeeff;
+  border: solid 1px #aaaaff;
+  padding: 0.5em;
+}
+.variablelist
+{
+  padding: 4px;
+  margin-left: 3em;
+}
+.variablelist td:first-child
+{
+  vertical-align: top;
+}
+table.navigation
+{
+  background: #ffeeee;
+  border: solid 1px #ffaaaa;
+  margin-top: 0.5em;
+  margin-bottom: 0.5em;
+}
+.navigation a
+{
+  color: #770000;
+}
+.navigation a:visited
+{
+  color: #550000;
+}
+.navigation .title
+{
+  font-size: 200%;
+}
+div.refnamediv
+{
+  margin-top: 2em;
+}
+div.gallery-float
+{
+  float: left;
+  padding: 10px;
+}
+div.gallery-float img
+{
+  border-style: none;
+}
+div.gallery-spacer
+{
+  clear: both;
+}
+a
+{
+  text-decoration: none;
+}
+a:hover
+{
+  text-decoration: underline;
+  color: #FF0000;
+}
+"""
+
+
+
+class DocumentationItem:
+    """
+    A class to hold information about a configuration
+    item. It contains the key name, description, a list of related names,
+    and the group this item is contained in.
+    """
+
+    def __init__(self):
+        self._groups  = []
+        self._related = []
+        self._name    = ""
+        self._desc    = ""
+
+    def groups(self):
+        return self._groups
+
+    def name(self):
+        return self._name
+
+    def description(self):
+        return self._desc
+
+    def related(self):
+        return self._related
+
+    def setName(self, name):
+        self._name = name
+
+    def setDescription(self, desc):
+        self._desc = desc
+
+    def addGroup(self, group):
+        self._groups.append(group)
+
+    def addRelation(self, relation):
+        self._related.append(relation)
+
+    def sort(self):
+        self._related.sort()
+        self._groups.sort()
+
+
+class Documentation:
+    """
+    Holds the documentation... with mappings from key to items...
+    """
+
+    def __init__(self):
+        self.__keys   = {}
+        self.__groups = {}
+
+    def insert_doc_item(self, item):
+        """
+        Insert the Doc Item into the internal list
+        of representation
+        """
+        item.sort()
+        self.__keys[item.name()] = item
+
+        for group in item.groups():
+            if not group in self.__groups:
+                self.__groups[group] = []
+            self.__groups[group].append(item)
+            self.__groups[group].sort()
+
+
+    def doc_item(self, key):
+        """
+        Return the DocumentationItem describing the key
+        """
+        try:
+            return self.__keys[key]
+        except KeyError:
+            return None
+
+    def doc_keys(self):
+        """
+        Return the documented KEYS (names)
+        """
+        return self.__keys.keys()
+
+    def groups(self):
+        """
+        Return the names of available groups
+        """
+        return self.__groups.keys()
+
+    def group_content(self, group_name):
+        """
+        Return a list of keys/names that are in a specific
+        group or the empty list
+        """
+        try:
+            return self.__groups[group_name]
+        except KeyError:
+            return []
+
+
+def parse_cmdline(args):
+    """
+    Parse the command line and return the result as a tuple
+    """
+
+    parser = optparse.OptionParser( version = "Bitbake Documentation Tool Core version %s, %%prog version %s" % (bb.__version__, __version__))
+    usage  = """%prog [options]
+
+Create a set of html pages (documentation) for a bitbake.conf....
+"""
+
+    # Add the needed options
+    parser.add_option( "-c", "--config", help = "Use the specified configuration file as source",
+                       action = "store", dest = "config", default = os.path.join("conf", "documentation.conf") )
+
+    parser.add_option( "-o", "--output", help = "Output directory for html files",
+                       action = "store", dest = "output", default = "html/" )
+
+    parser.add_option( "-D",  "--debug", help = "Increase the debug level",
+                       action = "count", dest = "debug", default = 0 )
+
+    parser.add_option( "-v", "--verbose", help = "output more chit-char to the terminal",
+                       action = "store_true", dest = "verbose", default = False )
+
+    options, args = parser.parse_args( sys.argv )
+ 
+    bb.msg.init_msgconfig(options.verbose, options.debug)
+
+    return options.config, options.output
+
+def main():
+    """
+    The main Method
+    """
+
+    (config_file, output_dir) = parse_cmdline( sys.argv )
+
+    # right, let us load the file now
+    try:
+        documentation = bb.parse.handle( config_file, bb.data.init() )
+    except IOError:
+        bb.fatal( "Unable to open %s" % config_file )
+    except bb.parse.ParseError:
+        bb.fatal( "Unable to parse %s" % config_file )
+
+    if isinstance(documentation, dict):
+        documentation = documentation[""]
+
+    # Assuming we have the file loaded now, we will initialize the 'tree'
+    doc = Documentation()
+
+    # defined states
+    state_begin = 0
+    state_see   = 1
+    state_group = 2
+
+    for key in bb.data.keys(documentation):
+        data   = documentation.getVarFlag(key, "doc", False)
+        if not data:
+            continue
+
+        # The Documentation now starts
+        doc_ins = DocumentationItem()
+        doc_ins.setName(key)
+
+
+        tokens = data.split(' ')
+        state = state_begin
+        string= ""
+        for token in tokens:
+            token = token.strip(',')
+
+            if not state == state_see and token == "@see":
+                state = state_see
+                continue
+            elif not state == state_group and token  == "@group":
+                state = state_group
+                continue
+
+            if state == state_begin:
+                string += " %s" % token
+            elif state == state_see:
+                doc_ins.addRelation(token)
+            elif state == state_group:
+                doc_ins.addGroup(token)
+
+        # set the description
+        doc_ins.setDescription(string)
+        doc.insert_doc_item(doc_ins)
+
+    # let us create the HTML now
+    bb.utils.mkdirhier(output_dir)
+    os.chdir(output_dir)
+
+    # Let us create the sites now. We do it in the following order
+    # Start with the index.html. It will point to sites explaining all
+    # keys and groups
+    html_slave = HTMLFormatter()
+
+    f = file('style.css', 'w')
+    print >> f, html_slave.createCSS()
+
+    f = file('index.html', 'w')
+    print >> f, html_slave.createIndex()
+
+    f = file('all_groups.html', 'w')
+    print >> f, html_slave.createGroupsSite(doc)
+
+    f = file('all_keys.html', 'w')
+    print >> f, html_slave.createKeysSite(doc)
+
+    # now for each group create the site
+    for group in doc.groups():
+        f = file('group%s.html' % group, 'w')
+        print >> f, html_slave.createGroupSite(group, doc.group_content(group))
+
+    # now for the keys
+    for key in doc.doc_keys():
+        f = file('key%s.html' % doc.doc_item(key).name(), 'w')
+        print >> f, html_slave.createKeySite(doc.doc_item(key))
+
+
+if __name__ == "__main__":
+    main()
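
For reference, bitdoc's input is the doc flag attached to each variable in the documentation.conf that main() parses: plain words form the description, while @see and @group switch the token walk into collecting relations and group names. A standalone illustration of that state machine, using an invented doc string (not taken from a real documentation.conf):

    doc = "The directory where downloaded sources are stored. @group fetcher @see SRC_URI, PREMIRRORS"

    description, relations, groups = [], [], []
    state = "begin"
    for token in doc.split(' '):
        token = token.strip(',')
        if token == "@see":
            state = "see"
            continue
        elif token == "@group":
            state = "group"
            continue
        if state == "begin":
            description.append(token)
        elif state == "see":
            relations.append(token)
        elif state == "group":
            groups.append(token)

    print " ".join(description)  # The directory where downloaded sources are stored.
    print relations              # ['SRC_URI', 'PREMIRRORS']
    print groups                 # ['fetcher']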

+ 122 - 0
bitbake/bin/image-writer

@@ -0,0 +1,122 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Wind River Systems, Inc.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+import os
+import sys
+sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname( \
+                                    os.path.abspath(__file__))), 'lib'))
+try:
+    import bb
+except RuntimeError as exc:
+    sys.exit(str(exc))
+
+import gtk
+import optparse
+import pygtk
+
+from bb.ui.crumbs.hobwidget import HobAltButton, HobButton
+from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog
+from bb.ui.crumbs.hig.deployimagedialog import DeployImageDialog
+from bb.ui.crumbs.hig.imageselectiondialog import ImageSelectionDialog
+
+# All the filesystem types bitbake supports are listed here. Needs more testing.
+DEPLOYABLE_IMAGE_TYPES = ["jffs2", "cramfs", "ext2", "ext3", "ext4", "btrfs", "squashfs", "ubi", "vmdk"]
+Title = "USB Image Writer"
+
+class DeployWindow(gtk.Window):
+    def __init__(self, image_path=''):
+        super(DeployWindow, self).__init__()
+
+        if len(image_path) > 0:
+            valid = True
+            if not os.path.exists(image_path):
+                valid = False
+                lbl = "<b>Invalid image file path: %s.</b>\nPress <b>Select Image</b> to select an image." % image_path
+            else:
+                image_path = os.path.abspath(image_path)
+                extend_name = os.path.splitext(image_path)[1][1:]
+                if extend_name not in DEPLOYABLE_IMAGE_TYPES:
+                    valid = False
+                    lbl = "<b>Undeployable imge type: %s</b>\nPress <b>Select Image</b> to select an image." % extend_name
+
+            if not valid:
+                image_path = ''
+                crumbs_dialog = CrumbsMessageDialog(self, lbl, gtk.STOCK_DIALOG_INFO)
+                button = crumbs_dialog.add_button("Close", gtk.RESPONSE_OK)
+                HobButton.style_button(button)
+                crumbs_dialog.run()
+                crumbs_dialog.destroy()
+
+        self.deploy_dialog = DeployImageDialog(Title, image_path, self,
+                                        gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT
+                                        | gtk.DIALOG_NO_SEPARATOR, None, standalone=True)
+        close_button = self.deploy_dialog.add_button("Close", gtk.RESPONSE_NO)
+        HobAltButton.style_button(close_button)
+        close_button.connect('clicked', gtk.main_quit)
+
+        write_button = self.deploy_dialog.add_button("Write USB image", gtk.RESPONSE_YES)
+        HobAltButton.style_button(write_button)
+
+        self.deploy_dialog.connect('select_image_clicked', self.select_image_clicked_cb)
+        self.deploy_dialog.connect('destroy', gtk.main_quit)
+        response = self.deploy_dialog.show()
+
+    def select_image_clicked_cb(self, dialog):
+        cwd = os.getcwd()
+        dialog = ImageSelectionDialog(cwd, DEPLOYABLE_IMAGE_TYPES, Title, self, gtk.FILE_CHOOSER_ACTION_SAVE )
+        button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
+        HobAltButton.style_button(button)
+        button = dialog.add_button("Open", gtk.RESPONSE_YES)
+        HobAltButton.style_button(button)
+        response = dialog.run()
+
+        if response == gtk.RESPONSE_YES:
+            if not dialog.image_names:
+                lbl = "<b>No selections made</b>\nClicked the radio button to select a image."
+                crumbs_dialog = CrumbsMessageDialog(self, lbl, gtk.STOCK_DIALOG_INFO)
+                button = crumbs_dialog.add_button("Close", gtk.RESPONSE_OK)
+                HobButton.style_button(button)
+                crumbs_dialog.run()
+                crumbs_dialog.destroy()
+                dialog.destroy()
+                return
+
+            # get the full path of image
+            image_path = os.path.join(dialog.image_folder, dialog.image_names[0])
+            self.deploy_dialog.set_image_text_buffer(image_path)
+            self.deploy_dialog.set_image_path(image_path)
+
+        dialog.destroy()
+
+def main():
+    parser = optparse.OptionParser(
+                usage = """%prog [-h] [image_file]
+
+%prog writes bootable images to USB devices. You can
+provide the image file on the command line or select it using the GUI.""")
+
+    options, args = parser.parse_args(sys.argv)
+    image_file = args[1] if len(args) > 1 else ''
+    dw = DeployWindow(image_file)
+
+if __name__ == '__main__':
+    try:
+        main()
+        gtk.main()
+    except Exception:
+        import traceback
+        traceback.print_exc(3)

+ 332 - 0
bitbake/bin/toaster

@@ -0,0 +1,332 @@
+#!/bin/echo ERROR: This script needs to be sourced. Please run as .
+
+# toaster - shell script to start Toaster
+
+# Copyright (C) 2013-2015 Intel Corp.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see http://www.gnu.org/licenses/.
+
+# Usage: source toaster [start|stop|restart-bitbake]
+#                       [webport=<port>] [noui] [noweb]
+
+# Helper function to kill a background toaster development server
+
+webserverKillAll()
+{
+    local pidfile
+    for pidfile in ${BUILDDIR}/.toastermain.pid ${BUILDDIR}/.runbuilds.pid; do
+        if [ -f ${pidfile} ]; then
+            pid=`cat ${pidfile}`
+            while kill -0 $pid 2>/dev/null; do
+                kill -SIGTERM -$pid 2>/dev/null
+                sleep 1
+                # Kill processes if they are still running - may happen
+                # in interactive shells
+                ps fux | grep "python.*manage.py runserver" | awk '{print $2}' | xargs kill
+            done
+            rm  ${pidfile}
+        fi
+    done
+}
+
+webserverStartAll()
+{
+    # do not start if toastermain points to a valid process
+    if ! cat "${BUILDDIR}/.toastermain.pid" 2>/dev/null | xargs -I{} kill -0 {} ; then
+        retval=1
+        rm "${BUILDDIR}/.toastermain.pid"
+    fi
+
+    retval=0
+    # you can always add a superuser later via
+    # ../bitbake/lib/toaster/manage.py createsuperuser --username=<ME>
+    $MANAGE migrate --noinput || retval=1
+
+    if [ $retval -eq 1 ]; then
+        echo "Failed migrations, aborting system start" 1>&2
+        return $retval
+    fi
+
+    $MANAGE checksettings --traceback || retval=1
+
+    if [ $retval -eq 1 ]; then
+        printf "\nError while checking settings; aborting\n"
+        return $retval
+    fi
+
+    echo "Starting webserver..."
+
+    $MANAGE runserver "0.0.0.0:$WEB_PORT" \
+           </dev/null >>${BUILDDIR}/toaster_web.log 2>&1 \
+           & echo $! >${BUILDDIR}/.toastermain.pid
+
+    sleep 1
+
+    if ! cat "${BUILDDIR}/.toastermain.pid" | xargs -I{} kill -0 {} ; then
+        retval=1
+        rm "${BUILDDIR}/.toastermain.pid"
+    else
+        echo "Webserver address:  http://0.0.0.0:$WEB_PORT/"
+    fi
+
+    return $retval
+}
+
+INSTOPSYSTEM=0
+
+# define the stop command
+stop_system()
+{
+    # prevent reentry
+    if [ $INSTOPSYSTEM -eq 1 ]; then return; fi
+    INSTOPSYSTEM=1
+    if [ -f ${BUILDDIR}/.toasterui.pid ]; then
+        kill `cat ${BUILDDIR}/.toasterui.pid` 2>/dev/null
+        rm ${BUILDDIR}/.toasterui.pid
+    fi
+    stop_bitbake
+    webserverKillAll
+    # unset exported variables
+    unset DATABASE_URL
+    unset TOASTER_CONF
+    unset TOASTER_DIR
+    trap - SIGHUP
+    #trap - SIGCHLD
+    INSTOPSYSTEM=0
+}
+
+start_bitbake() {
+    unset BBSERVER
+    bitbake --read conf/toaster.conf --server-only -t xmlrpc -B 0.0.0.0:0
+    if [ $? -ne 0 ]; then
+        echo "Bitbake server start failed"
+        return 1
+    fi
+    export BBSERVER=0.0.0.0:-1
+    export DATABASE_URL=`$MANAGE get-dburl`
+    # we start the TOASTERUI only if not inhibited
+    if [ $NOTOASTERUI -eq 0 ]; then
+        bitbake --observe-only -u toasterui --remote-server=$BBSERVER -t xmlrpc \
+            >>${BUILDDIR}/toaster_ui.log 2>&1 \
+            & echo $! >${BUILDDIR}/.toasterui.pid
+    fi
+    return 0
+}
+
+stop_bitbake() {
+    BBSERVER=0.0.0.0:-1 bitbake -m
+    unset BBSERVER
+    # force stop any misbehaving bitbake server
+    lsof -t bitbake.lock | grep "[0-9]\+" | xargs -n1 -r kill
+}
+
+verify_prereq() {
+    # Verify Django version
+    reqfile=$(python -c "import os; print os.path.realpath('$BBBASEDIR/toaster-requirements.txt')")
+    exp='s/Django\([><=]\+\)\([^,]\+\),\([><=]\+\)\(.\+\)/'
+    exp=$exp'import sys,django;version=django.get_version().split(".");'
+    exp=$exp'sys.exit(not (version \1 "\2".split(".") and version \3 "\4".split(".")))/p'
+    if ! sed -n "$exp" $reqfile | python - ; then
+        req=`grep ^Django $reqfile`
+        echo "This program needs $req"
+        echo "Please install with pip install -r $reqfile"
+        return 2
+    fi
+
+    return 0
+}
+
+# read command line parameters
+if [ -n "$BASH_SOURCE" ] ; then
+    TOASTER=${BASH_SOURCE}
+elif [ -n "$ZSH_NAME" ] ; then
+    TOASTER=${(%):-%x}
+else
+    TOASTER=$0
+fi
+
+BBBASEDIR=`dirname $TOASTER`/..
+MANAGE=$BBBASEDIR/lib/toaster/manage.py
+OEROOT=`dirname $TOASTER`/../..
+
+# this is the configuration file we are using for toaster
+# we are using the same logic that oe-setup-builddir uses
+# (based on TEMPLATECONF and .templateconf) to determine
+# which toasterconf.json to use.
+# note: There are a number of relative path assumptions
+# in the local layers that currently make using an arbitrary
+# toasterconf.json difficult.
+
+. $OEROOT/.templateconf
+if [ -n "$TEMPLATECONF" ]; then
+    if [ ! -d "$TEMPLATECONF" ]; then
+        # Allow TEMPLATECONF=meta-xyz/conf as a shortcut
+        if [ -d "$OEROOT/$TEMPLATECONF" ]; then
+            TEMPLATECONF="$OEROOT/$TEMPLATECONF"
+        fi
+        if [ ! -d "$TEMPLATECONF" ]; then
+            echo >&2 "Error: '$TEMPLATECONF' must be a directory containing toasterconf.json"
+            return 1
+        fi
+    fi
+fi
+
+if [ "$TOASTER_CONF" = "" ]; then
+    TOASTER_CONF="$TEMPLATECONF/toasterconf.json"
+    export TOASTER_CONF=$(python -c "import os; print os.path.realpath('$TOASTER_CONF')")
+fi
+
+if [ ! -f $TOASTER_CONF ]; then
+    echo "$TOASTER_CONF configuration file not found. Set TOASTER_CONF to specify file or fix .templateconf"
+    return 1
+fi
+
+# this defines the dir toaster will use for
+# 1) clones of layers (in _toaster_clones )
+# 2) the build dir (in build)
+# 3) the sqlite db if that is being used.
+# 4) pid's we need to clean up on exit/shutdown
+# note: for future. in order to make this an arbitrary directory, we need to
+# make sure that the toaster.sqlite file doesn't default to `pwd` like it currently does.
+export TOASTER_DIR=`pwd`
+
+NOTOASTERUI=0
+WEBSERVER=1
+WEB_PORT="8000"
+unset CMD
+for param in $*; do
+    case $param in
+    noui )
+            NOTOASTERUI=1
+    ;;
+    noweb )
+            WEBSERVER=0
+    ;;
+    start )
+            CMD=$param
+    ;;
+    stop )
+            CMD=$param
+    ;;
+    webport=*)
+            WEB_PORT="${param#*=}"
+    esac
+done
+
+if [ `basename \"$0\"` = `basename \"${TOASTER}\"` ]; then
+    echo "Error: This script needs to be sourced. Please run as . $TOASTER"
+    return 1
+fi
+
+if [ "$1" = 'restart-bitbake' ] ; then
+    stop_bitbake
+    sleep 1
+    start_bitbake
+    rc=$?
+    sleep 3
+    return $rc
+fi
+
+verify_prereq || return 1
+
+# We make sure we're running in the current shell and in a good environment
+if [ -z "$BUILDDIR" ] ||  ! which bitbake >/dev/null 2>&1 ; then
+    echo "Error: Build environment is not setup or bitbake is not in path." 1>&2
+    return 2
+fi
+
+# this defines the dir toaster will use for
+# 1) clones of layers (in _toaster_clones )
+# 2) the build dir (in build)
+# 3) the sqlite db if that is being used.
+# 4) pid's we need to clean up on exit/shutdown
+# note: for future. in order to make this an arbitrary directory, we need to
+# make sure that the toaster.sqlite file doesn't default to `pwd`
+# like it currently does.
+export TOASTER_DIR=`dirname $BUILDDIR`
+
+# Determine the action. If specified by arguments, fine, if not, toggle it
+if [ "$CMD" = "start" ] ; then
+    if [ -n "$BBSERVER" ]; then
+	echo " Toaster is already running. Exiting..."
+	return 1
+fi
+elif [ "$CMD" = "" ]; then
+    if [ -z "$BBSERVER" ]; then
+        CMD="start"
+    else
+        CMD="stop"
+    fi
+fi
+
+echo "The system will $CMD."
+
+# Execute the commands
+
+case $CMD in
+    start )
+        # check if addr:port is not in use
+        if [ "$CMD" == 'start' ]; then
+             $MANAGE checksocket "0.0.0.0:$WEB_PORT" || return 1
+        fi
+
+        # Make sure it's safe to start by checking bitbake lock
+        if [ -e $BUILDDIR/bitbake.lock ]; then
+            python -c "import fcntl; fcntl.flock(open(\"$BUILDDIR/bitbake.lock\"), fcntl.LOCK_EX|fcntl.LOCK_NB)" 2>/dev/null
+            if [ $? -ne 0 ] ; then
+                echo "Error: bitbake lock state error. File locks show that the system is on." 1>&2
+                echo "Please wait for the current build to finish, stop and then start the system again." 1>&2
+                return 3
+            fi
+        fi
+
+        # kill Toaster web server if it's alive
+        if [ -e $BUILDDIR/.toastermain.pid ] && kill -0 `cat $BUILDDIR/.toastermain.pid`; then
+            echo "Warning: bitbake appears to be dead, but the Toaster web server is running." 1>&2
+            echo " Something fishy is going on." 1>&2
+            echo "Cleaning up the web server to start from a clean slate."
+            webserverKillAll
+        fi
+
+        # Create configuration file
+        conf=${BUILDDIR}/conf/toaster.conf
+        echo "# Created by toaster start script" > $conf
+        echo "INHERIT+=\"toaster buildhistory\"" >> $conf
+
+        if [ $WEBSERVER -gt 0 ] && ! webserverStartAll; then
+            echo "Failed ${CMD}."
+            return 4
+        fi
+        start_bitbake
+        if [ $? -eq 0 ]; then
+            $MANAGE runbuilds & echo $! >${BUILDDIR}/.runbuilds.pid
+            # set fail safe stop system on terminal exit
+            trap stop_system SIGHUP
+            echo "Successful ${CMD}."
+            return 0
+        else
+            # failed start, do stop
+            stop_system
+            echo "Failed ${CMD}."
+            return 1
+        fi
+        # stop system on terminal exit
+        set -o monitor
+        trap stop_system SIGHUP
+    ;;
+    stop )
+        stop_system
+        echo "Successful ${CMD}."
+    ;;
+esac
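
The sed expression in verify_prereq() above turns the single Django line of toaster-requirements.txt (of the form Django<op><version>,<op><version>) into a throwaway Python comparison against django.get_version(). Written directly in Python, the same check amounts to roughly the following sketch (the requirement string is a made-up example; the real one comes from toaster-requirements.txt, and Django must be installed for the import to succeed):

    import operator
    import re
    import django  # only used to ask the installed version

    OPS = {">": operator.gt, ">=": operator.ge,
           "<": operator.lt, "<=": operator.le, "==": operator.eq}

    requirement = "Django>=1.7.3,<1.8"  # illustrative value
    op1, ver1, op2, ver2 = re.match(r"Django([><=]+)([^,]+),([><=]+)(.+)$", requirement).groups()

    # Same list-of-strings comparison the generated one-liner performs.
    version = django.get_version().split(".")
    if not (OPS[op1](version, ver1.split(".")) and OPS[op2](version, ver2.split("."))):
        raise SystemExit("This program needs %s" % requirement)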

+ 174 - 0
bitbake/bin/toaster-eventreplay

@@ -0,0 +1,174 @@
+#!/usr/bin/env python
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Copyright (C) 2014        Alex Damian
+#
+# This file re-uses code spread throughout other Bitbake source files.
+# As such, all other copyrights belong to their own right holders.
+#
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+
+# This command takes a filename as a single parameter. The filename is read
+# as a build eventlog, and the ToasterUI is used to process events in the file
+# and log data in the database
+
+from __future__ import print_function
+import os
+import sys, logging
+
+# mangle syspath to allow easy import of modules
+sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
+                                'lib'))
+
+
+import bb.cooker
+from bb.ui import toasterui
+import sys
+import logging
+
+import json, pickle
+
+
+class FileReadEventsServerConnection():
+    """  Emulates a connection to a bitbake server that feeds
+        events coming actually read from a saved log file.
+    """
+
+    class MockConnection():
+        """ fill-in for the proxy to the server. we just return generic data
+        """
+        def __init__(self, sc):
+            self._sc = sc
+
+        def runCommand(self, commandArray):
+            """ emulates running a command on the server; only read-only commands are accepted """
+            command_name = commandArray[0]
+
+            if command_name == "getVariable":
+                if commandArray[1] in self._sc._variables:
+                    return (self._sc._variables[commandArray[1]]['v'], None)
+                return (None, "Missing variable")
+
+            elif command_name == "getAllKeysWithFlags":
+                dump = {}
+                flaglist = commandArray[1]
+                for k in self._sc._variables.keys():
+                    try:
+                        if not k.startswith("__"):
+                            v = self._sc._variables[k]['v']
+                            dump[k] = {
+                                'v' : v ,
+                                'history' : self._sc._variables[k]['history'],
+                            }
+                            for d in flaglist:
+                                dump[k][d] = self._sc._variables[k][d]
+                    except Exception as e:
+                        print(e)
+                return (dump, None)
+            else:
+                raise Exception("Command %s not implemented" % commandArray[0])
+
+        def terminateServer(self):
+            """ do not do anything """
+            pass
+
+
+
+    class EventReader():
+        def __init__(self, sc):
+            self._sc = sc
+            self.firstraise = 0
+
+        def _create_event(self, line):
+            def _import_class(name):
+                assert len(name) > 0
+                assert "." in name, name
+
+                components = name.strip().split(".")
+                modulename = ".".join(components[:-1])
+                moduleklass = components[-1]
+
+                module = __import__(modulename, fromlist=[str(moduleklass)])
+                return getattr(module, moduleklass)
+
+            # we build a toaster event out of current event log line
+            try:
+                event_data = json.loads(line.strip())
+                event_class = _import_class(event_data['class'])
+                event_object = pickle.loads(json.loads(event_data['vars']))
+            except ValueError as e:
+                print("Failed loading ", line)
+                raise e
+
+            if not isinstance(event_object, event_class):
+                raise Exception("Error loading objects %s class %s ", event_object, event_class)
+
+            return event_object
+
+        def waitEvent(self, timeout):
+
+            nextline = self._sc._eventfile.readline()
+            if len(nextline) == 0:
+                # the build data ended, while toasterui still waits for events.
+                # this happens when the server was abruptly stopped, so we simulate this
+                self.firstraise += 1
+                if self.firstraise == 1:
+                    raise KeyboardInterrupt()
+                else:
+                    return None
+            else:
+                self._sc.lineno += 1
+            return self._create_event(nextline)
+
+
+    def _readVariables(self, variableline):
+        self._variables = json.loads(variableline.strip())['allvariables']
+
+
+    def __init__(self, file_name):
+        self.connection = FileReadEventsServerConnection.MockConnection(self)
+        self._eventfile = open(file_name, "r")
+
+        # we expect to have the variable dump at the start of the file
+        self.lineno = 1
+        self._readVariables(self._eventfile.readline())
+
+        self.events = FileReadEventsServerConnection.EventReader(self)
+
+
+
+
+
+class MockConfigParameters():
+    """ stand-in for cookerdata.ConfigParameters; as we don't really config a cooker, this
+        serves just to supply needed interfaces for the toaster ui to work """
+    def __init__(self):
+        self.observe_only = True            # we can only read files
+
+
+# run toaster ui on our mock bitbake class
+if __name__ == "__main__":
+    if len(sys.argv) < 2:
+        print("Usage: %s event.log " % sys.argv[0])
+        sys.exit(1)
+
+    file_name = sys.argv[-1]
+    mock_connection = FileReadEventsServerConnection(file_name)
+    configParams = MockConfigParameters()
+
+    # run the main program and set exit code to the returned value
+    sys.exit(toasterui.main(mock_connection.connection, mock_connection.events, configParams))
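
The event log this script replays is line-oriented: the first line is a JSON object whose allvariables member feeds _readVariables(), and each later line is a JSON object whose class member names the event type and whose vars member is a JSON-encoded pickle of the event instance, exactly as _create_event() expects. A minimal round trip of one such line, with a stand-in event class instead of a real bb.event type:

    import json
    import pickle

    class FakeEvent(object):  # stand-in for a bb.event class
        def __init__(self, msg):
            self.msg = msg

    # Writing side: what one line of an event log would look like.
    event = FakeEvent("task started")
    line = json.dumps({"class": "%s.%s" % (FakeEvent.__module__, FakeEvent.__name__),
                       "vars": json.dumps(pickle.dumps(event))})

    # Reading side: mirrors _create_event(), minus the dynamic import.
    # str() is used because json returns unicode and protocol-0 pickles are ASCII.
    event_data = json.loads(line)
    restored = pickle.loads(str(json.loads(event_data["vars"])))
    print event_data["class"]  # __main__.FakeEvent
    print restored.msg         # task started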

+ 67 - 0
bitbake/classes/base.bbclass

@@ -0,0 +1,67 @@
+# Copyright (C) 2003  Chris Larson
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the "Software"),
+# to deal in the Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following conditions:
+# 
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+# 
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+
+die() {
+	bbfatal "$*"
+}
+
+bbnote() {
+	echo "NOTE:" "$*"
+}
+
+bbwarn() {
+	echo "WARNING:" "$*"
+}
+
+bbfatal() {
+	echo "FATAL:" "$*"
+	exit 1
+}
+
+addtask showdata
+do_showdata[nostamp] = "1"
+python do_showdata() {
+	import sys
+	# emit variables and shell functions
+	bb.data.emit_env(sys.__stdout__, d, True)
+	# emit the metadata which isnt valid shell
+	for e in bb.data.keys(d):
+		if d.getVarFlag(e, 'python'):
+			bb.plain("\npython %s () {\n%s}" % (e, d.getVar(e, True)))
+}
+
+addtask listtasks
+do_listtasks[nostamp] = "1"
+python do_listtasks() {
+	import sys
+	for e in bb.data.keys(d):
+		if d.getVarFlag(e, 'task'):
+			bb.plain("%s" % e)
+}
+
+addtask build
+do_build[dirs] = "${TOPDIR}"
+do_build[nostamp] = "1"
+python base_do_build () {
+	bb.note("The included, default BB base.bbclass does not define a useful default task.")
+	bb.note("Try running the 'listtasks' task against a .bb to see what tasks are defined.")
+}
+
+EXPORT_FUNCTIONS do_clean do_mrproper do_build

+ 50 - 0
bitbake/conf/bitbake.conf

@@ -0,0 +1,50 @@
+# Copyright (C) 2003  Chris Larson
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the "Software"),
+# to deal in the Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following conditions:
+# 
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+# 
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+
+B = "${S}"
+CVSDIR = "${DL_DIR}/cvs"
+DEPENDS = ""
+DEPLOY_DIR = "${TMPDIR}/deploy"
+DEPLOY_DIR_IMAGE = "${DEPLOY_DIR}/images"
+DL_DIR = "${TMPDIR}/downloads"
+FILESDIR = "${@bb.utils.which(d.getVar('FILESPATH', True), '.')}"
+FILESPATH = "${FILE_DIRNAME}/${PF}:${FILE_DIRNAME}/${P}:${FILE_DIRNAME}/${PN}:${FILE_DIRNAME}/files:${FILE_DIRNAME}"
+FILE_DIRNAME = "${@os.path.dirname(d.getVar('FILE', False))}"
+GITDIR = "${DL_DIR}/git"
+IMAGE_CMD = "_NO_DEFINED_IMAGE_TYPES_"
+IMAGE_ROOTFS = "${TMPDIR}/rootfs"
+OVERRIDES = "local:${MACHINE}:${TARGET_OS}:${TARGET_ARCH}"
+P = "${PN}-${PV}"
+PERSISTENT_DIR = "${TMPDIR}/cache"
+PF = "${PN}-${PV}-${PR}"
+PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}"
+PR = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[2] or 'r0'}"
+PROVIDES = ""
+PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[1] or '1.0'}"
+S = "${WORKDIR}/${P}"
+SRC_URI = "file://${FILE}"
+STAMP = "${TMPDIR}/stamps/${PF}"
+SVNDIR = "${DL_DIR}/svn"
+T = "${WORKDIR}/temp"
+TARGET_ARCH = "${BUILD_ARCH}"
+TMPDIR = "${TOPDIR}/tmp"
+WORKDIR = "${TMPDIR}/work/${PF}"
+PERSISTENT_DIR = "${TMPDIR}/cache"
+GITPKGV = "${@bb.fetch2.get_srcrev(d, 'gitpkgv_revision')}"
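
Most of these defaults are built from ${...} references that are expanded when a value is read, so WORKDIR resolves through TMPDIR, PF, PN, PV and PR in turn. A toy recursive expansion (nothing like bitbake's real datastore, which also handles the ${@...} Python expressions, overrides and flags seen above) illustrates how the defaults compose, using invented values for TOPDIR, PN, PV and PR:

    import re

    d = {
        "TOPDIR": "/srv/build",
        "TMPDIR": "${TOPDIR}/tmp",
        "PN": "zlib", "PV": "1.2.8", "PR": "r0",
        "PF": "${PN}-${PV}-${PR}",
        "WORKDIR": "${TMPDIR}/work/${PF}",
    }

    def expand(value, d):
        # Replace each ${NAME} with the (recursively expanded) value of NAME.
        return re.sub(r"\$\{(\w+)\}", lambda m: expand(d[m.group(1)], d), value)

    print expand(d["WORKDIR"], d)  # /srv/build/tmp/work/zlib-1.2.8-r0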

+ 1 - 0
bitbake/contrib/README

@@ -0,0 +1 @@
+This directory is for additional contributed files which may be useful.

+ 31 - 0
bitbake/contrib/bbdev.sh

@@ -0,0 +1,31 @@
+# This is a shell function to be sourced into your shell or placed in your .profile,
+# which makes setting things up for BitBake a bit easier.
+#
+# The author disclaims copyright to the contents of this file and places it in the
+# public domain.
+
+bbdev () {
+	local BBDIR PKGDIR BUILDDIR
+	if test x"$1" = "x--help"; then echo >&2 "syntax: bbdev [bbdir [pkgdir [builddir]]]"; return 1; fi
+	if test x"$1" = x; then BBDIR=`pwd`; else BBDIR=$1; fi
+	if test x"$2" = x; then PKGDIR=`pwd`; else PKGDIR=$2; fi
+	if test x"$3" = x; then BUILDDIR=`pwd`; else BUILDDIR=$3; fi
+
+	BBDIR=`readlink -f $BBDIR`
+	PKGDIR=`readlink -f $PKGDIR`
+	BUILDDIR=`readlink -f $BUILDDIR`
+	if ! (test -d $BBDIR && test -d $PKGDIR && test -d $BUILDDIR); then
+		echo >&2 "syntax: bbdev [bbdir [pkgdir [builddir]]]"
+		return 1
+	fi
+	
+	PATH=$BBDIR/bin:$PATH
+	BBPATH=$BBDIR
+	if test x"$BBDIR" != x"$PKGDIR"; then
+		BBPATH=$PKGDIR:$BBPATH
+	fi
+	if test x"$PKGDIR" != x"$BUILDDIR"; then
+		BBPATH=$BUILDDIR:$BBPATH
+	fi
+	export BBPATH
+}

+ 68 - 0
bitbake/contrib/dump_cache.py

@@ -0,0 +1,68 @@
+#!/usr/bin/env python
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Copyright (C) 2012 Wind River Systems, Inc.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+#
+# This is used for dumping the bb_cache.dat, the output format is:
+# recipe_path PN PV PACKAGES
+#
+import os
+import sys
+import warnings
+
+# For importing bb.cache
+sys.path.insert(0, os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), '../lib'))
+from bb.cache import CoreRecipeInfo
+
+import cPickle as pickle
+
+def main(argv=None):
+    """
+    Get the mapping for the target recipe.
+    """
+    if len(argv) != 1:
+        print >>sys.stderr, "Error, need one argument!"
+        return 2
+
+    cachefile = argv[0]
+
+    with open(cachefile, "rb") as cachefile:
+        pickled = pickle.Unpickler(cachefile)
+        while cachefile:
+            try:
+                key = pickled.load()
+                val = pickled.load()
+            except Exception:
+                break
+            if isinstance(val, CoreRecipeInfo) and (not val.skipped):
+                pn = val.pn
+                # Filter out the native recipes.
+                if key.startswith('virtual:native:') or pn.endswith("-native"):
+                    continue
+
+                # 1.0 is the default version for a no PV recipe.
+                if val.__dict__.has_key("pv"):
+                    pv = val.pv
+                else:
+                    pv = "1.0"
+
+                print("%s %s %s %s" % (key, pn, pv, ' '.join(val.packages)))
+
+if __name__ == "__main__":
+    sys.exit(main(sys.argv[1:]))
+

+ 24 - 0
bitbake/contrib/vim/ftdetect/bitbake.vim

@@ -0,0 +1,24 @@
+" Vim filetype detection file
+" Language:     BitBake
+" Author:       Ricardo Salveti <rsalveti@rsalveti.net>
+" Copyright:    Copyright (C) 2008  Ricardo Salveti <rsalveti@rsalveti.net>
+" Licence:      You may redistribute this under the same terms as Vim itself
+"
+" This sets up the syntax highlighting for BitBake files, like .bb, .bbclass and .inc
+
+if &compatible || version < 600
+    finish
+endif
+
+" .bb, .bbappend and .bbclass
+au BufNewFile,BufRead *.{bb,bbappend,bbclass}	set filetype=bitbake
+
+" .inc
+au BufNewFile,BufRead *.inc		set filetype=bitbake
+
+" .conf
+au BufNewFile,BufRead *.conf
+    \ if (match(expand("%:p:h"), "conf") > 0) |
+    \     set filetype=bitbake |
+    \ endif
+

+ 2 - 0
bitbake/contrib/vim/ftplugin/bitbake.vim

@@ -0,0 +1,2 @@
+set sts=4 sw=4 et
+set cms=#%s

+ 84 - 0
bitbake/contrib/vim/plugin/newbb.vim

@@ -0,0 +1,84 @@
+" Vim plugin file
+" Purpose:	Create a template for new bb files
+" Author:	Ricardo Salveti <rsalveti@gmail.com>
+" Copyright:	Copyright (C) 2008 Ricardo Salveti <rsalveti@gmail.com>
+"
+" This file is licensed under the MIT license, see COPYING.MIT in
+" this source distribution for the terms.
+"
+" Based on the gentoo-syntax package
+"
+" Will try to use git to find the user name and email
+
+if &compatible || v:version < 600
+    finish
+endif
+
+fun! <SID>GetUserName()
+    let l:user_name = system("git config --get user.name")
+    if v:shell_error
+        return "Unknown User"
+    else
+        return substitute(l:user_name, "\n", "", "")
+    endif
+endfun
+
+fun! <SID>GetUserEmail()
+    let l:user_email = system("git config --get user.email")
+    if v:shell_error
+        return "unknow@user.org"
+    else
+        return substitute(l:user_email, "\n", "", "")
+endfun
+
+fun! BBHeader()
+    let l:current_year = strftime("%Y")
+    let l:user_name = <SID>GetUserName()
+    let l:user_email = <SID>GetUserEmail()
+    0 put ='# Copyright (C) ' . l:current_year .
+                \ ' ' . l:user_name . ' <' . l:user_email . '>'
+    put ='# Released under the MIT license (see COPYING.MIT for the terms)'
+    $
+endfun
+
+fun! NewBBTemplate()
+    let l:paste = &paste
+    set nopaste
+    
+    " Get the header
+    call BBHeader()
+
+    " New the bb template
+    put ='DESCRIPTION = \"\"'
+    put ='HOMEPAGE = \"\"'
+    put ='LICENSE = \"\"' 
+    put ='SECTION = \"\"'
+    put ='DEPENDS = \"\"'
+    put =''
+    put ='SRC_URI = \"\"'
+
+    " Go to the first place to edit
+    0
+    /^DESCRIPTION =/
+    exec "normal 2f\""
+
+    if paste == 1
+        set paste
+    endif
+endfun
+
+if !exists("g:bb_create_on_empty")
+    let g:bb_create_on_empty = 1
+endif
+
+" disable in case of vimdiff
+if v:progname =~ "vimdiff"
+    let g:bb_create_on_empty = 0
+endif
+
+augroup NewBB
+    au BufNewFile *.bb
+                \ if g:bb_create_on_empty |
+                \    call NewBBTemplate() |
+                \ endif
+augroup END
+

+ 126 - 0
bitbake/contrib/vim/syntax/bitbake.vim

@@ -0,0 +1,126 @@
+" Vim syntax file
+" Language:     BitBake bb/bbclasses/inc
+" Author:       Chris Larson <kergoth@handhelds.org>
+"               Ricardo Salveti <rsalveti@rsalveti.net>
+" Copyright:    Copyright (C) 2004  Chris Larson <kergoth@handhelds.org>
+"               Copyright (C) 2008  Ricardo Salveti <rsalveti@rsalveti.net>
+"
+" This file is licensed under the MIT license, see COPYING.MIT in
+" this source distribution for the terms.
+"
+" Syntax highlighting for bb, bbclasses and inc files.
+"
+" It's an entirely new type, just has specific syntax in shell and python code
+
+if &compatible || v:version < 600
+    finish
+endif
+if exists("b:current_syntax")
+    finish
+endif
+
+syn include @python syntax/python.vim
+if exists("b:current_syntax")
+  unlet b:current_syntax
+endif
+
+" BitBake syntax
+
+" Matching case
+syn case match
+
+" Indicates the error when nothing is matched
+syn match bbUnmatched           "."
+
+" Comments
+syn cluster bbCommentGroup      contains=bbTodo,@Spell
+syn keyword bbTodo              COMBAK FIXME TODO XXX contained
+syn match bbComment             "#.*$" contains=@bbCommentGroup
+
+" String helpers
+syn match bbQuote               +['"]+ contained 
+syn match bbDelimiter           "[(){}=]" contained
+syn match bbArrayBrackets       "[\[\]]" contained
+
+" BitBake strings
+syn match bbContinue            "\\$"
+syn region bbString             matchgroup=bbQuote start=+"+ skip=+\\$+ end=+"+ contained contains=bbTodo,bbContinue,bbVarDeref,bbVarPyValue,@Spell
+syn region bbString             matchgroup=bbQuote start=+'+ skip=+\\$+ end=+'+ contained contains=bbTodo,bbContinue,bbVarDeref,bbVarPyValue,@Spell
+
+" Vars definition
+syn match bbExport            "^export" nextgroup=bbIdentifier skipwhite
+syn keyword bbExportFlag        export contained nextgroup=bbIdentifier skipwhite
+syn match bbIdentifier          "[a-zA-Z0-9\-_\.\/\+]\+" display contained
+syn match bbVarDeref            "${[a-zA-Z0-9\-_\.\/\+]\+}" contained
+syn match bbVarEq               "\(:=\|+=\|=+\|\.=\|=\.\|?=\|??=\|=\)" contained nextgroup=bbVarValue
+syn match bbVarDef              "^\(export\s*\)\?\([a-zA-Z0-9\-_\.\/\+]\+\(_[${}a-zA-Z0-9\-_\.\/\+]\+\)\?\)\s*\(:=\|+=\|=+\|\.=\|=\.\|?=\|??=\|=\)\@=" contains=bbExportFlag,bbIdentifier,bbVarDeref nextgroup=bbVarEq
+syn match bbVarValue            ".*$" contained contains=bbString,bbVarDeref,bbVarPyValue
+syn region bbVarPyValue         start=+${@+ skip=+\\$+ end=+}+ contained contains=@python
+
+" Vars metadata flags
+syn match bbVarFlagDef          "^\([a-zA-Z0-9\-_\.]\+\)\(\[[a-zA-Z0-9\-_\.]\+\]\)\@=" contains=bbIdentifier nextgroup=bbVarFlagFlag
+syn region bbVarFlagFlag        matchgroup=bbArrayBrackets start="\[" end="\]\s*\(=\|+=\|=+\|?=\)\@=" contained contains=bbIdentifier nextgroup=bbVarEq
+
+" Includes and requires
+syn keyword bbInclude           inherit include require contained 
+syn match bbIncludeRest         ".*$" contained contains=bbString,bbVarDeref
+syn match bbIncludeLine         "^\(inherit\|include\|require\)\s\+" contains=bbInclude nextgroup=bbIncludeRest
+
+" Add taks and similar
+syn keyword bbStatement         addtask addhandler after before EXPORT_FUNCTIONS contained
+syn match bbStatementRest       ".*$" skipwhite contained contains=bbStatement
+syn match bbStatementLine       "^\(addtask\|addhandler\|after\|before\|EXPORT_FUNCTIONS\)\s\+" contains=bbStatement nextgroup=bbStatementRest
+
+" OE Important Functions
+syn keyword bbOEFunctions       do_fetch do_unpack do_patch do_configure do_compile do_stage do_install do_package contained
+
+" Generic Functions
+syn match bbFunction            "\h[0-9A-Za-z_-]*" display contained contains=bbOEFunctions
+
+" BitBake shell metadata
+syn include @shell syntax/sh.vim
+if exists("b:current_syntax")
+  unlet b:current_syntax
+endif
+syn keyword bbShFakeRootFlag    fakeroot contained
+syn match bbShFuncDef           "^\(fakeroot\s*\)\?\([0-9A-Za-z_${}-]\+\)\(python\)\@<!\(\s*()\s*\)\({\)\@=" contains=bbShFakeRootFlag,bbFunction,bbVarDeref,bbDelimiter nextgroup=bbShFuncRegion skipwhite
+syn region bbShFuncRegion       matchgroup=bbDelimiter start="{\s*$" end="^}\s*$" contained contains=@shell
+
+" Python value inside shell functions
+syn region shDeref         start=+${@+ skip=+\\$+ excludenl end=+}+ contained contains=@python
+
+" BitBake python metadata
+syn keyword bbPyFlag            python contained
+syn match bbPyFuncDef           "^\(python\s\+\)\([0-9A-Za-z_${}-]\+\)\?\(\s*()\s*\)\({\)\@=" contains=bbPyFlag,bbFunction,bbVarDeref,bbDelimiter nextgroup=bbPyFuncRegion skipwhite
+syn region bbPyFuncRegion       matchgroup=bbDelimiter start="{\s*$" end="^}\s*$" contained contains=@python
+
+" BitBake 'def'd python functions
+syn keyword bbPyDef             def contained
+syn region bbPyDefRegion        start='^\(def\s\+\)\([0-9A-Za-z_-]\+\)\(\s*(.*)\s*\):\s*$' end='^\(\s\|$\)\@!' contains=@python
+
+" Highlighting Definitions
+hi def link bbUnmatched         Error
+hi def link bbInclude           Include
+hi def link bbTodo              Todo
+hi def link bbComment           Comment
+hi def link bbQuote             String
+hi def link bbString            String
+hi def link bbDelimiter         Keyword
+hi def link bbArrayBrackets     Statement
+hi def link bbContinue          Special
+hi def link bbExport            Type
+hi def link bbExportFlag        Type
+hi def link bbIdentifier        Identifier
+hi def link bbVarDeref          PreProc
+hi def link bbVarDef            Identifier
+hi def link bbVarValue          String
+hi def link bbShFakeRootFlag    Type
+hi def link bbFunction          Function
+hi def link bbPyFlag            Type
+hi def link bbPyDef             Statement
+hi def link bbStatement         Statement
+hi def link bbStatementRest     Identifier
+hi def link bbOEFunctions       Special
+hi def link bbVarPyValue        PreProc
+
+let b:current_syntax = "bb"

+ 339 - 0
bitbake/doc/COPYING.GPL

@@ -0,0 +1,339 @@
+		    GNU GENERAL PUBLIC LICENSE
+		       Version 2, June 1991
+
+ Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+			    Preamble
+
+  The licenses for most software are designed to take away your
+freedom to share and change it.  By contrast, the GNU General Public
+License is intended to guarantee your freedom to share and change free
+software--to make sure the software is free for all its users.  This
+General Public License applies to most of the Free Software
+Foundation's software and to any other program whose authors commit to
+using it.  (Some other Free Software Foundation software is covered by
+the GNU Lesser General Public License instead.)  You can apply it to
+your programs, too.
+
+  When we speak of free software, we are referring to freedom, not
+price.  Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+this service if you wish), that you receive source code or can get it
+if you want it, that you can change the software or use pieces of it
+in new free programs; and that you know you can do these things.
+
+  To protect your rights, we need to make restrictions that forbid
+anyone to deny you these rights or to ask you to surrender the rights.
+These restrictions translate to certain responsibilities for you if you
+distribute copies of the software, or if you modify it.
+
+  For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must give the recipients all the rights that
+you have.  You must make sure that they, too, receive or can get the
+source code.  And you must show them these terms so they know their
+rights.
+
+  We protect your rights with two steps: (1) copyright the software, and
+(2) offer you this license which gives you legal permission to copy,
+distribute and/or modify the software.
+
+  Also, for each author's protection and ours, we want to make certain
+that everyone understands that there is no warranty for this free
+software.  If the software is modified by someone else and passed on, we
+want its recipients to know that what they have is not the original, so
+that any problems introduced by others will not reflect on the original
+authors' reputations.
+
+  Finally, any free program is threatened constantly by software
+patents.  We wish to avoid the danger that redistributors of a free
+program will individually obtain patent licenses, in effect making the
+program proprietary.  To prevent this, we have made it clear that any
+patent must be licensed for everyone's free use or not licensed at all.
+
+  The precise terms and conditions for copying, distribution and
+modification follow.
+
+		    GNU GENERAL PUBLIC LICENSE
+   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+  0. This License applies to any program or other work which contains
+a notice placed by the copyright holder saying it may be distributed
+under the terms of this General Public License.  The "Program", below,
+refers to any such program or work, and a "work based on the Program"
+means either the Program or any derivative work under copyright law:
+that is to say, a work containing the Program or a portion of it,
+either verbatim or with modifications and/or translated into another
+language.  (Hereinafter, translation is included without limitation in
+the term "modification".)  Each licensee is addressed as "you".
+
+Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope.  The act of
+running the Program is not restricted, and the output from the Program
+is covered only if its contents constitute a work based on the
+Program (independent of having been made by running the Program).
+Whether that is true depends on what the Program does.
+
+  1. You may copy and distribute verbatim copies of the Program's
+source code as you receive it, in any medium, provided that you
+conspicuously and appropriately publish on each copy an appropriate
+copyright notice and disclaimer of warranty; keep intact all the
+notices that refer to this License and to the absence of any warranty;
+and give any other recipients of the Program a copy of this License
+along with the Program.
+
+You may charge a fee for the physical act of transferring a copy, and
+you may at your option offer warranty protection in exchange for a fee.
+
+  2. You may modify your copy or copies of the Program or any portion
+of it, thus forming a work based on the Program, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+    a) You must cause the modified files to carry prominent notices
+    stating that you changed the files and the date of any change.
+
+    b) You must cause any work that you distribute or publish, that in
+    whole or in part contains or is derived from the Program or any
+    part thereof, to be licensed as a whole at no charge to all third
+    parties under the terms of this License.
+
+    c) If the modified program normally reads commands interactively
+    when run, you must cause it, when started running for such
+    interactive use in the most ordinary way, to print or display an
+    announcement including an appropriate copyright notice and a
+    notice that there is no warranty (or else, saying that you provide
+    a warranty) and that users may redistribute the program under
+    these conditions, and telling the user how to view a copy of this
+    License.  (Exception: if the Program itself is interactive but
+    does not normally print such an announcement, your work based on
+    the Program is not required to print an announcement.)
+
+These requirements apply to the modified work as a whole.  If
+identifiable sections of that work are not derived from the Program,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works.  But when you
+distribute the same sections as part of a whole which is a work based
+on the Program, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Program.
+
+In addition, mere aggregation of another work not based on the Program
+with the Program (or with a work based on the Program) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+  3. You may copy and distribute the Program (or a work based on it,
+under Section 2) in object code or executable form under the terms of
+Sections 1 and 2 above provided that you also do one of the following:
+
+    a) Accompany it with the complete corresponding machine-readable
+    source code, which must be distributed under the terms of Sections
+    1 and 2 above on a medium customarily used for software interchange; or,
+
+    b) Accompany it with a written offer, valid for at least three
+    years, to give any third party, for a charge no more than your
+    cost of physically performing source distribution, a complete
+    machine-readable copy of the corresponding source code, to be
+    distributed under the terms of Sections 1 and 2 above on a medium
+    customarily used for software interchange; or,
+
+    c) Accompany it with the information you received as to the offer
+    to distribute corresponding source code.  (This alternative is
+    allowed only for noncommercial distribution and only if you
+    received the program in object code or executable form with such
+    an offer, in accord with Subsection b above.)
+
+The source code for a work means the preferred form of the work for
+making modifications to it.  For an executable work, complete source
+code means all the source code for all modules it contains, plus any
+associated interface definition files, plus the scripts used to
+control compilation and installation of the executable.  However, as a
+special exception, the source code distributed need not include
+anything that is normally distributed (in either source or binary
+form) with the major components (compiler, kernel, and so on) of the
+operating system on which the executable runs, unless that component
+itself accompanies the executable.
+
+If distribution of executable or object code is made by offering
+access to copy from a designated place, then offering equivalent
+access to copy the source code from the same place counts as
+distribution of the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+  4. You may not copy, modify, sublicense, or distribute the Program
+except as expressly provided under this License.  Any attempt
+otherwise to copy, modify, sublicense or distribute the Program is
+void, and will automatically terminate your rights under this License.
+However, parties who have received copies, or rights, from you under
+this License will not have their licenses terminated so long as such
+parties remain in full compliance.
+
+  5. You are not required to accept this License, since you have not
+signed it.  However, nothing else grants you permission to modify or
+distribute the Program or its derivative works.  These actions are
+prohibited by law if you do not accept this License.  Therefore, by
+modifying or distributing the Program (or any work based on the
+Program), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Program or works based on it.
+
+  6. Each time you redistribute the Program (or any work based on the
+Program), the recipient automatically receives a license from the
+original licensor to copy, distribute or modify the Program subject to
+these terms and conditions.  You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties to
+this License.
+
+  7. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License.  If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Program at all.  For example, if a patent
+license would not permit royalty-free redistribution of the Program by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Program.
+
+If any portion of this section is held invalid or unenforceable under
+any particular circumstance, the balance of the section is intended to
+apply and the section as a whole is intended to apply in other
+circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system, which is
+implemented by public license practices.  Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+  8. If the distribution and/or use of the Program is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Program under this License
+may add an explicit geographical distribution limitation excluding
+those countries, so that distribution is permitted only in or among
+countries not thus excluded.  In such case, this License incorporates
+the limitation as if written in the body of this License.
+
+  9. The Free Software Foundation may publish revised and/or new versions
+of the General Public License from time to time.  Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+Each version is given a distinguishing version number.  If the Program
+specifies a version number of this License which applies to it and "any
+later version", you have the option of following the terms and conditions
+either of that version or of any later version published by the Free
+Software Foundation.  If the Program does not specify a version number of
+this License, you may choose any version ever published by the Free Software
+Foundation.
+
+  10. If you wish to incorporate parts of the Program into other free
+programs whose distribution conditions are different, write to the author
+to ask for permission.  For software which is copyrighted by the Free
+Software Foundation, write to the Free Software Foundation; we sometimes
+make exceptions for this.  Our decision will be guided by the two goals
+of preserving the free status of all derivatives of our free software and
+of promoting the sharing and reuse of software generally.
+
+			    NO WARRANTY
+
+  11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
+FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW.  EXCEPT WHEN
+OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
+PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
+OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.  THE ENTIRE RISK AS
+TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU.  SHOULD THE
+PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
+REPAIR OR CORRECTION.
+
+  12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
+REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
+INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
+OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
+TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
+YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
+PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGES.
+
+		     END OF TERMS AND CONDITIONS
+
+	    How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License along
+    with this program; if not, write to the Free Software Foundation, Inc.,
+    51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this
+when it starts in an interactive mode:
+
+    Gnomovision version 69, Copyright (C) year name of author
+    Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, the commands you use may
+be called something other than `show w' and `show c'; they could even be
+mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the program, if
+necessary.  Here is a sample; alter the names:
+
+  Yoyodyne, Inc., hereby disclaims all copyright interest in the program
+  `Gnomovision' (which makes passes at compilers) written by James Hacker.
+
+  <signature of Ty Coon>, 1 April 1989
+  Ty Coon, President of Vice
+
+This General Public License does not permit incorporating your program into
+proprietary programs.  If your program is a subroutine library, you may
+consider it more useful to permit linking proprietary applications with the
+library.  If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.

+ 17 - 0
bitbake/doc/COPYING.MIT

@@ -0,0 +1,17 @@
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+THE USE OR OTHER DEALINGS IN THE SOFTWARE.

+ 91 - 0
bitbake/doc/Makefile

@@ -0,0 +1,91 @@
+# This is a single Makefile to handle all generated BitBake documents.
+# The Makefile needs to live in the documentation directory and all figures used
+# in any manuals must be .PNG files and live in the individual book's figures
+# directory.
+#
+# The Makefile has these targets:
+#
+#    pdf:      generates a PDF version of a manual.
+#    html:     generates an HTML version of a manual.
+#    tarball:  creates a tarball for the doc files.
+#    validate: validates the manual's XML against the DocBook DTD.
+#    clean:    removes the generated HTML, PDF, and tarball files.
+#
+# The Makefile generates an HTML version of every document.  The
+# variable DOC indicates the folder name for a given manual.
+#
+# To build a manual, you must invoke 'make' with the DOC argument.
+#
+# Examples:
+#
+#     make DOC=bitbake-user-manual
+#     make pdf DOC=bitbake-user-manual
+#
+# The first example generates the HTML version of the User Manual.
+# The second example generates the PDF version of the User Manual.
+#
+
+ifeq ($(DOC),bitbake-user-manual)
+XSLTOPTS = --stringparam html.stylesheet bitbake-user-manual-style.css \
+           --stringparam  chapter.autolabel 1 \
+           --stringparam  section.autolabel 1 \
+           --stringparam  section.label.includes.component.label 1 \
+           --xinclude
+ALLPREQ = html tarball
+TARFILES = bitbake-user-manual-style.css bitbake-user-manual.html figures/bitbake-title.png
+MANUALS = $(DOC)/$(DOC).html
+FIGURES = figures
+STYLESHEET = $(DOC)/*.css
+
+endif
+
+##
+# These URIs should be rewritten by your distribution's XML catalog to
+# match your locally installed XSL stylesheets.
+XSL_BASE_URI  = http://docbook.sourceforge.net/release/xsl/current
+XSL_XHTML_URI = $(XSL_BASE_URI)/xhtml/docbook.xsl
+
+all: $(ALLPREQ)
+
+pdf:
+ifeq ($(DOC),bitbake-user-manual)
+	@echo " "
+	@echo "********** Building "$(DOC)
+	@echo " "
+	cd $(DOC); ../tools/docbook-to-pdf $(DOC).xml ../template; cd ..
+endif
+
+html:
+ifeq ($(DOC),bitbake-user-manual)
+#       See http://www.sagehill.net/docbookxsl/HtmlOutput.html
+	@echo " "
+	@echo "******** Building "$(DOC)
+	@echo " "
+	cd $(DOC); xsltproc $(XSLTOPTS) -o $(DOC).html $(DOC)-customization.xsl $(DOC).xml; cd ..
+endif
+
+tarball: html
+	@echo " "
+	@echo "******** Creating Tarball of document files"
+	@echo " "
+	cd $(DOC); tar -cvzf $(DOC).tgz $(TARFILES); cd ..
+
+validate:
+	cd $(DOC); xmllint --postvalid --xinclude --noout $(DOC).xml; cd ..
+
+publish:
+	@if test -f $(DOC)/$(DOC).html; \
+	  then \
+            echo " "; \
+            echo "******** Publishing "$(DOC)".html"; \
+            echo " "; \
+            scp -r $(MANUALS) $(STYLESHEET) docs.yp:/var/www/www.yoctoproject.org-docs/$(VER)/$(DOC); \
+            cd $(DOC); scp -r $(FIGURES) docs.yp:/var/www/www.yoctoproject.org-docs/$(VER)/$(DOC); \
+	else \
+          echo " "; \
+          echo $(DOC)".html missing.  Generate the file first then try again."; \
+          echo " "; \
+	fi
+
+clean:
+	rm -rf $(MANUALS); rm $(DOC)/$(DOC).tgz;

+ 39 - 0
bitbake/doc/README

@@ -0,0 +1,39 @@
+Documentation
+=============
+
+This is the directory that contains the BitBake documentation. 
+
+Manual Organization
+===================
+
+Folders exist for individual manuals as follows:
+
+* bitbake-user-manual      - The BitBake User Manual 
+
+Each folder is self-contained regarding content and figures.
+
+If you want to find HTML versions of the BitBake manuals on the web, 
+go to http://www.openembedded.org/wiki/Documentation. 
+
+Makefile
+========
+
+The Makefile processes manual directories to create HTML, PDF,
+tarballs, etc.  Details on how the Makefile works are documented
+inside the Makefile.  See that file for more information.
+
+To build a manual, you run the make command and pass it the name
+of the folder containing the manual's contents. 
+For example, the following command, run from the documentation directory,
+creates an HTML version of the BitBake User Manual and a tarball of the
+generated files; use "make pdf DOC=bitbake-user-manual" for the PDF version.
+The DOC variable specifies the manual you are making:
+
+     $ make DOC=bitbake-user-manual
+
+template
+========
+Contains various templates, fonts, and some old PNG files.
+
+tools
+=====
+Contains a tool to convert the DocBook files to PDF format.

+ 29 - 0
bitbake/doc/bitbake-user-manual/bitbake-user-manual-customization.xsl

@@ -0,0 +1,29 @@
+<?xml version='1.0'?>
+<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns="http://www.w3.org/1999/xhtml" xmlns:fo="http://www.w3.org/1999/XSL/Format" version="1.0">
+
+  <xsl:import href="http://downloads.yoctoproject.org/mirror/docbook-mirror/docbook-xsl-1.76.1/xhtml/docbook.xsl" />
+
+<!--
+
+  <xsl:import href="../template/1.76.1/docbook-xsl-1.76.1/xhtml/docbook.xsl" />
+
+  <xsl:import href="http://docbook.sourceforge.net/release/xsl/1.76.1/xhtml/docbook.xsl" />
+
+-->
+
+  <xsl:include href="../template/permalinks.xsl"/>
+  <xsl:include href="../template/section.title.xsl"/>
+  <xsl:include href="../template/component.title.xsl"/>
+  <xsl:include href="../template/division.title.xsl"/>
+  <xsl:include href="../template/formal.object.heading.xsl"/>
+  <xsl:include href="../template/gloss-permalinks.xsl"/>
+
+  <xsl:param name="html.stylesheet" select="'user-manual-style.css'" />
+  <xsl:param name="chapter.autolabel" select="1" />
+  <xsl:param name="section.autolabel" select="1" />
+  <xsl:param name="section.label.includes.component.label" select="1" />
+  <xsl:param name="appendix.autolabel">A</xsl:param>
+
+<!--  <xsl:param name="generate.toc" select="'article nop'"></xsl:param>  -->
+
+</xsl:stylesheet>

+ 931 - 0
bitbake/doc/bitbake-user-manual/bitbake-user-manual-execution.xml

@@ -0,0 +1,931 @@
+<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
+"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
+
+<chapter id="bitbake-user-manual-execution">
+    <title>Execution</title>
+
+    <para>
+        The primary purpose for running BitBake is to produce some kind
+        of output such as a single installable package, a kernel, a software
+        development kit, or even a full, board-specific bootable Linux image,
+        complete with bootloader, kernel, and root filesystem.
+        Of course, you can execute the <filename>bitbake</filename>
+        command with options that cause it to execute single tasks,
+        compile single recipe files, capture or clear data, or simply
+        return information about the execution environment.
+    </para>
+
+    <para>
+        This chapter describes BitBake's execution process from start
+        to finish when you use it to create an image.
+        The execution process is launched using the following command
+        form:
+        <literallayout class='monospaced'>
+     $ bitbake <replaceable>target</replaceable>
+        </literallayout>
+        For information on the BitBake command and its options,
+        see
+        "<link linkend='bitbake-user-manual-command'>The BitBake Command</link>"
+        section.
+        <note>
+            <para>
+                Prior to executing BitBake, you should take advantage of available
+                parallel thread execution on your build host by setting the
+                <link linkend='var-BB_NUMBER_THREADS'><filename>BB_NUMBER_THREADS</filename></link>
+                variable in your project's <filename>local.conf</filename>
+                configuration file.
+            </para>
+
+            <para>
+                A common method to determine this value for your build host is to run
+                the following:
+                <literallayout class='monospaced'>
+     $ grep processor /proc/cpuinfo
+                </literallayout>
+                This command returns the number of processors, which takes into
+                account hyper-threading.
+                Thus, a quad-core build host with hyper-threading most likely
+                shows eight processors, which is the value you would then assign to
+                <filename>BB_NUMBER_THREADS</filename>.
+            </para>
+
+            <para>
+                A possibly simpler solution is that some Linux distributions
+                (e.g. Debian and Ubuntu) provide the <filename>ncpus</filename> command.
+            </para>
+        </note>
+    </para>
+
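The processor count suggested in the note above can also be obtained from Python's
standard library. The lines below are a minimal, illustrative sketch (not part of the
manual's tooling); the printed assignment is just one convenient way to present the result.

     # Illustrative sketch: derive a candidate BB_NUMBER_THREADS value from the
     # number of processors the standard library reports.
     import multiprocessing

     if __name__ == "__main__":
         print('BB_NUMBER_THREADS = "%d"' % multiprocessing.cpu_count())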
+    <section id='parsing-the-base-configuration-metadata'>
+        <title>Parsing the Base Configuration Metadata</title>
+
+        <para>
+            The first thing BitBake does is parse base configuration
+            metadata.
+            Base configuration metadata consists of your project's
+            <filename>bblayers.conf</filename> file to determine what
+            layers BitBake needs to recognize, all necessary
+            <filename>layer.conf</filename> files (one from each layer),
+            and <filename>bitbake.conf</filename>.
+            The data itself is of various types:
+            <itemizedlist>
+                <listitem><para><emphasis>Recipes:</emphasis>
+                    Details about particular pieces of software.
+                    </para></listitem>
+                <listitem><para><emphasis>Class Data:</emphasis>
+                    An abstraction of common build information
+                    (e.g. how to build a Linux kernel).
+                    </para></listitem>
+                <listitem><para><emphasis>Configuration Data:</emphasis>
+                    Machine-specific settings, policy decisions,
+                    and so forth.
+                    Configuration data acts as the glue to bind everything
+                    together.</para></listitem>
+            </itemizedlist>
+        </para>
+
+        <para>
+            The <filename>layer.conf</filename> files are used to
+            construct key variables such as
+            <link linkend='var-BBPATH'><filename>BBPATH</filename></link>
+            and
+            <link linkend='var-BBFILES'><filename>BBFILES</filename></link>.
+            <filename>BBPATH</filename> is used to search for
+            configuration and class files under the
+            <filename>conf</filename> and <filename>classes</filename>
+            directories, respectively.
+            <filename>BBFILES</filename> is used to locate both recipe
+            and recipe append files
+            (<filename>.bb</filename> and <filename>.bbappend</filename>).
+            If there is no <filename>bblayers.conf</filename> file,
+            it is assumed the user has set the <filename>BBPATH</filename>
+            and <filename>BBFILES</filename> directly in the environment.
+        </para>
+
+        <para>
+            Next, the <filename>bitbake.conf</filename> file is located
+            using the <filename>BBPATH</filename> variable that was
+            just constructed.
+            The <filename>bitbake.conf</filename> file may also include other
+            configuration files using the
+            <filename>include</filename> or
+            <filename>require</filename> directives.
+        </para>
+
+        <para>
+            Prior to parsing configuration files, BitBake looks
+            at certain variables, including:
+            <itemizedlist>
+                <listitem><para>
+                    <link linkend='var-BB_ENV_WHITELIST'><filename>BB_ENV_WHITELIST</filename></link>
+                    </para></listitem>
+                <listitem><para>
+                    <link linkend='var-BB_ENV_EXTRAWHITE'><filename>BB_ENV_EXTRAWHITE</filename></link>
+                    </para></listitem>
+                <listitem><para>
+                    <link linkend='var-BB_PRESERVE_ENV'><filename>BB_PRESERVE_ENV</filename></link>
+                    </para></listitem>
+                <listitem><para>
+                    <link linkend='var-BB_ORIGENV'><filename>BB_ORIGENV</filename></link>
+                    </para></listitem>
+                <listitem><para>
+                    <link linkend='var-BITBAKE_UI'><filename>BITBAKE_UI</filename></link>
+                    </para></listitem>
+            </itemizedlist>
+            The first four variables in this list relate to how BitBake treats shell
+            environment variables during task execution.
+            By default, BitBake cleans the environment variables and provides tight
+            control over the shell execution environment.
+            However, through these first four variables, you can control
+            which environment variables BitBake allows into the shell
+            environment when it executes tasks.
+            See the
+            "<link linkend='passing-information-into-the-build-task-environment'>Passing Information Into the Build Task Environment</link>"
+            section and the information about these variables in the
+            variable glossary for more information on how they work and
+            on how to use them.
+        </para>
+
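As a rough illustration of the environment cleaning described above, the sketch below
keeps only a whitelisted set of variables. It is a simplified picture, not BitBake's
actual implementation, and the whitelisted names are examples only.

     # Sketch: keep only whitelisted variables from the calling environment,
     # mirroring the idea behind BB_ENV_WHITELIST.
     import os

     def clean_environment(whitelist):
         """Return a copy of os.environ restricted to the whitelisted names."""
         return {name: value for name, value in os.environ.items()
                 if name in whitelist}

     if __name__ == "__main__":
         print(sorted(clean_environment({"PATH", "HOME", "http_proxy", "https_proxy"})))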
+        <para>
+            The base configuration metadata is global
+            and therefore affects all recipes and tasks that are executed.
+        </para>
+
+        <para>
+            BitBake first searches the current working directory for an
+            optional <filename>conf/bblayers.conf</filename> configuration file.
+            This file is expected to contain a
+            <link linkend='var-BBLAYERS'><filename>BBLAYERS</filename></link>
+            variable that is a space-delimited list of 'layer' directories.
+            Recall that if BitBake cannot find a <filename>bblayers.conf</filename>
+            file, then it is assumed the user has set the <filename>BBPATH</filename>
+            and <filename>BBFILES</filename> variables directly in the environment.
+        </para>
+
+        <para>
+            For each directory (layer) in this list, a <filename>conf/layer.conf</filename>
+            file is located and parsed with the
+            <link linkend='var-LAYERDIR'><filename>LAYERDIR</filename></link>
+            variable being set to the directory where the layer was found.
+            The idea is these files automatically set up
+            <link linkend='var-BBPATH'><filename>BBPATH</filename></link>
+            and other variables correctly for a given build directory.
+        </para>
+
+        <para>
+            BitBake then expects to find the <filename>conf/bitbake.conf</filename>
+            file somewhere in the user-specified <filename>BBPATH</filename>.
+            That configuration file generally has include directives to pull
+            in any other metadata such as files specific to the architecture,
+            the machine, the local environment, and so forth.
+        </para>
+
+        <para>
+            Only variable definitions and include directives are allowed
+            in BitBake <filename>.conf</filename> files.
+            Some variables directly influence BitBake's behavior.
+            These variables might have been set from the environment
+            depending on the environment variables previously
+            mentioned or set in the configuration files.
+            The
+            "<link linkend='ref-variables-glos'>Variables Glossary</link>"
+            chapter presents a full list of variables.
+        </para>
+
+        <para>
+            After parsing configuration files, BitBake uses its rudimentary
+            inheritance mechanism, which is through class files, to inherit
+            some standard classes.
+            BitBake parses a class when the inherit directive responsible
+            for getting that class is encountered.
+        </para>
+
+        <para>
+            The <filename>base.bbclass</filename> file is always included.
+            Other classes that are specified in the configuration using the
+            <link linkend='var-INHERIT'><filename>INHERIT</filename></link>
+            variable are also included.
+            BitBake searches for class files in a
+            <filename>classes</filename> subdirectory under
+            the paths in <filename>BBPATH</filename> in the same way as
+            configuration files.
+        </para>
+
+        <para>
+            A good way to get an idea of the configuration files and
+            the class files used in your execution environment is to
+            run the following BitBake command:
+            <literallayout class='monospaced'>
+     $ bitbake -e > mybb.log
+            </literallayout>
+            Examining the top of the <filename>mybb.log</filename>
+            shows you the many configuration files and class files
+            used in your execution environment.
+        </para>
+
+        <note>
+            <para>
+                You need to be aware of how BitBake parses curly braces.
+                If a recipe uses a closing curly brace within the function and
+                the character has no leading spaces, BitBake produces a parsing
+                error.
+                If you use a pair of curly braces in a shell function, the
+                closing curly brace must not be located at the start of the line
+                without leading spaces.
+            </para>
+
+            <para>
+                Here is an example that causes BitBake to produce a parsing
+                error:
+                <literallayout class='monospaced'>
+     fakeroot create_shar() {
+         cat &lt;&lt; "EOF" &gt; ${SDK_DEPLOY}/${TOOLCHAIN_OUTPUTNAME}.sh
+     usage()
+     {
+       echo "test"
+       ###### The following "}" at the start of the line causes a parsing error ######
+     }
+     EOF
+     }
+                </literallayout>
+                Writing the recipe this way avoids the error:
+                <literallayout class='monospaced'>
+     fakeroot create_shar() {
+         cat &lt;&lt; "EOF" &gt; ${SDK_DEPLOY}/${TOOLCHAIN_OUTPUTNAME}.sh
+     usage()
+     {
+       echo "test"
+       ######The following "}" with a leading space at the start of the line avoids the error ######
+      }
+     EOF
+     }
+                </literallayout>
+            </para>
+        </note>
+    </section>
+
+    <section id='locating-and-parsing-recipes'>
+        <title>Locating and Parsing Recipes</title>
+
+        <para>
+            During the configuration phase, BitBake will have set
+            <link linkend='var-BBFILES'><filename>BBFILES</filename></link>.
+            BitBake now uses it to construct a list of recipes to parse,
+            along with any append files (<filename>.bbappend</filename>)
+            to apply.
+            <filename>BBFILES</filename> is a space-separated list of
+            available files and supports wildcards.
+            An example would be:
+            <literallayout class='monospaced'>
+     BBFILES = "/path/to/bbfiles/*.bb /path/to/appends/*.bbappend"
+            </literallayout>
+            BitBake parses each recipe and append file located
+            with <filename>BBFILES</filename> and stores the values of
+            various variables into the datastore.
+            <note>
+                Append files are applied in the order they are encountered in
+                <filename>BBFILES</filename>.
+            </note>
+            For each file, a fresh copy of the base configuration is
+            made, then the recipe is parsed line by line.
+            Any inherit statements cause BitBake to find and
+            then parse class files (<filename>.bbclass</filename>)
+            using
+            <link linkend='var-BBPATH'><filename>BBPATH</filename></link>
+            as the search path.
+            Finally, BitBake parses in order any append files found in
+            <filename>BBFILES</filename>.
+        </para>
+
+        <para>
+            One common convention is to use the recipe filename to define
+            pieces of metadata.
+            For example, in <filename>bitbake.conf</filename> the recipe
+            name and version are used to set the variables
+            <link linkend='var-PN'><filename>PN</filename></link> and
+            <link linkend='var-PV'><filename>PV</filename></link>:
+            <literallayout class='monospaced'>
+     PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}"
+     PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[1] or '1.0'}"
+            </literallayout>
+            In this example, a recipe called "something_1.2.3.bb" would set
+            <filename>PN</filename> to "something" and
+            <filename>PV</filename> to "1.2.3".
+        </para>
+
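The filename convention can be pictured with a few lines of Python. This is a simplified
sketch of the idea only, not the actual bb.parse.BBHandler.vars_from_file() implementation.

     # Sketch: derive (PN, PV) from a recipe file name, so that
     # "something_1.2.3.bb" yields ("something", "1.2.3").
     import os

     def pn_pv_from_filename(path):
         base = os.path.basename(path).rsplit(".", 1)[0]   # strip the .bb suffix
         if "_" in base:
             pn, pv = base.split("_", 1)
         else:
             pn, pv = base, "1.0"                          # default when no PV is encoded
         return pn, pv

     if __name__ == "__main__":
         print(pn_pv_from_filename("/path/to/something_1.2.3.bb"))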
+        <para>
+            By the time parsing is complete for a recipe, BitBake
+            has a list of tasks that the recipe defines and a set of
+            data consisting of keys and values as well as
+            dependency information about the tasks.
+        </para>
+
+        <para>
+            BitBake does not need all of this information.
+            It only needs a small subset of the information to make
+            decisions about the recipe.
+            Consequently, BitBake caches the values in which it is
+            interested and does not store the rest of the information.
+            Experience has shown it is faster to re-parse the metadata than to
+            try and write it out to the disk and then reload it.
+        </para>
+
+        <para>
+            Where possible, subsequent BitBake commands reuse this cache of
+            recipe information.
+            The validity of this cache is determined by first computing a
+            checksum of the base configuration data (see
+            <link linkend='var-BB_HASHCONFIG_WHITELIST'><filename>BB_HASHCONFIG_WHITELIST</filename></link>)
+            and then checking if the checksum matches.
+            If that checksum matches what is in the cache and the recipe
+            and class files have not changed, BitBake is able to use
+            the cache.
+            BitBake then reloads the cached information about the recipe
+            instead of reparsing it from scratch.
+        </para>
+
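The cache-validity check can be thought of as comparing a checksum of the current base
configuration against the checksum stored alongside the cache. The sketch below is
illustrative only; it does not reproduce BitBake's hashing or its
BB_HASHCONFIG_WHITELIST handling, and the configuration values shown are made up.

     # Sketch: reuse a cached parse only if the configuration checksum matches.
     import hashlib

     def config_checksum(config):
         """Hash a dictionary of configuration variables in a stable order."""
         data = "\n".join("%s=%s" % (k, config[k]) for k in sorted(config))
         return hashlib.md5(data.encode("utf-8")).hexdigest()

     def cache_is_valid(config, stored_checksum):
         return config_checksum(config) == stored_checksum

     if __name__ == "__main__":
         conf = {"BBPATH": "/build:/layers/meta", "BBFILES": "/layers/meta/*.bb"}
         print(cache_is_valid(conf, config_checksum(conf)))   # True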
+        <para>
+            Recipe file collections exist to allow the user to
+            have multiple repositories of
+            <filename>.bb</filename> files that contain the same
+            exact package.
+            For example, one could easily use them to make one's
+            own local copy of an upstream repository, but with
+            custom modifications that one does not want upstream.
+            Here is an example:
+            <literallayout class='monospaced'>
+    BBFILES = "/stuff/openembedded/*/*.bb /stuff/openembedded.modified/*/*.bb"
+    BBFILE_COLLECTIONS = "upstream local"
+    BBFILE_PATTERN_upstream = "^/stuff/openembedded/"
+    BBFILE_PATTERN_local = "^/stuff/openembedded.modified/"
+    BBFILE_PRIORITY_upstream = "5"
+    BBFILE_PRIORITY_local = "10"
+            </literallayout>
+            <note>
+                The layers mechanism is now the preferred method of collecting
+                code.
+                While the collections code remains, its main use is to set layer
+                priorities and to deal with overlap (conflicts) between layers.
+            </note>
+        </para>
+    </section>
+
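The collections example above boils down to matching recipe paths against the
BBFILE_PATTERN regular expressions and taking the priority of the matching collection.
The sketch below illustrates only that idea, reusing the values from the example; it is
not the collections code itself.

     # Sketch: map a recipe path to its collection and priority using the
     # BBFILE_PATTERN_* / BBFILE_PRIORITY_* values shown above.
     import re

     COLLECTIONS = {
         "upstream": (re.compile(r"^/stuff/openembedded/"), 5),
         "local":    (re.compile(r"^/stuff/openembedded\.modified/"), 10),
     }

     def classify(recipe_path):
         for name, (pattern, priority) in COLLECTIONS.items():
             if pattern.match(recipe_path):
                 return name, priority
         return None, 0

     if __name__ == "__main__":
         print(classify("/stuff/openembedded.modified/foo/foo_1.0.bb"))   # ('local', 10)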
+    <section id='bb-bitbake-providers'>
+        <title>Providers</title>
+
+        <para>
+            Assuming BitBake has been instructed to execute a target
+            and that all the recipe files have been parsed, BitBake
+            starts to figure out how to build the target.
+            BitBake looks through the <filename>PROVIDES</filename> list
+            for each of the recipes.
+            A <filename>PROVIDES</filename> list is the list of names by which
+            the recipe can be known.
+            Each recipe's <filename>PROVIDES</filename> list is created
+            implicitly through the recipe's
+            <link linkend='var-PN'><filename>PN</filename></link> variable
+            and explicitly through the recipe's
+            <link linkend='var-PROVIDES'><filename>PROVIDES</filename></link>
+            variable, which is optional.
+        </para>
+
+        <para>
+            When a recipe uses <filename>PROVIDES</filename>, that recipe's
+            functionality can be found under an alternative name or names other
+            than the implicit <filename>PN</filename> name.
+            As an example, suppose a recipe named <filename>keyboard_1.0.bb</filename>
+            contained the following:
+            <literallayout class='monospaced'>
+     PROVIDES += "fullkeyboard"
+            </literallayout>
+            The <filename>PROVIDES</filename> list for this recipe becomes
+            "keyboard", which is implicit, and "fullkeyboard", which is explicit.
+            Consequently, the functionality found in
+            <filename>keyboard_1.0.bb</filename> can be found under two
+            different names.
+        </para>
+    </section>
+
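Conceptually, a recipe's list of provider names is simply its PN plus anything added
through PROVIDES. A minimal sketch of that idea, using the keyboard example above (this
is not BitBake's provider-handling code):

     # Sketch: the names a recipe can be selected by are its PN plus any
     # explicitly declared PROVIDES entries.
     def provides_list(pn, explicit_provides=""):
         return [pn] + explicit_provides.split()

     if __name__ == "__main__":
         print(provides_list("keyboard", "fullkeyboard"))   # ['keyboard', 'fullkeyboard']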
+    <section id='bb-bitbake-preferences'>
+        <title>Preferences</title>
+
+        <para>
+            The <filename>PROVIDES</filename> list is only part of the solution
+            for figuring out a target's recipes.
+            Because targets might have multiple providers, BitBake needs
+            to prioritize providers by determining provider preferences.
+        </para>
+
+        <para>
+            A common example in which a target has multiple providers
+            is "virtual/kernel", which is on the
+            <filename>PROVIDES</filename> list for each kernel recipe.
+            Each machine often selects the best kernel provider by using a
+            line similar to the following in the machine configuration file:
+            <literallayout class='monospaced'>
+     PREFERRED_PROVIDER_virtual/kernel = "linux-yocto"
+            </literallayout>
+            The default
+            <link linkend='var-PREFERRED_PROVIDER'><filename>PREFERRED_PROVIDER</filename></link>
+            is the provider with the same name as the target.
+            BitBake iterates through each target it needs to build and
+            resolves it and its dependencies using this process.
+        </para>
+
+        <para>
+            Understanding how providers are chosen is made complicated by the fact
+            that multiple versions might exist for a given provider.
+            BitBake defaults to the highest version of a provider.
+            Version comparisons are made using the same method as Debian.
+            You can use the
+            <link linkend='var-PREFERRED_VERSION'><filename>PREFERRED_VERSION</filename></link>
+            variable to specify a particular version.
+            You can influence the order by using the
+            <link linkend='var-DEFAULT_PREFERENCE'><filename>DEFAULT_PREFERENCE</filename></link>
+            variable.
+        </para>
+
+        <para>
+            By default, files have a preference of "0".
+            Setting <filename>DEFAULT_PREFERENCE</filename> to "-1" makes the
+            recipe unlikely to be used unless it is explicitly referenced.
+            Setting <filename>DEFAULT_PREFERENCE</filename> to "1" makes it
+            likely the recipe is used.
+            <filename>PREFERRED_VERSION</filename> overrides any
+            <filename>DEFAULT_PREFERENCE</filename> setting.
+            <filename>DEFAULT_PREFERENCE</filename> is often used to mark newer
+            and more experimental recipe versions until they have undergone
+            sufficient testing to be considered stable.
+        </para>
+
+        <para>
+            When there are multiple “versions” of a given recipe,
+            BitBake defaults to selecting the most recent
+            version, unless otherwise specified.
+            If the recipe in question has a
+            <link linkend='var-DEFAULT_PREFERENCE'><filename>DEFAULT_PREFERENCE</filename></link>
+            set lower than the other recipes (default is 0), then
+            it will not be selected.
+            This allows the person or persons maintaining
+            the repository of recipe files to specify
+            their preference for the default selected version.
+            Additionally, the user can specify their preferred version.
+        </para>
+
+        <para>
+            If the first recipe is named <filename>a_1.1.bb</filename>, then the
+            <link linkend='var-PN'><filename>PN</filename></link> variable
+            will be set to “a”, and the
+            <link linkend='var-PV'><filename>PV</filename></link>
+            variable will be set to 1.1.
+        </para>
+
+        <para>
+            Thus, if a recipe named <filename>a_1.2.bb</filename> exists, BitBake
+            will choose 1.2 by default.
+            However, if you define the following variable in a
+            <filename>.conf</filename> file that BitBake parses, you
+            can change that preference:
+            <literallayout class='monospaced'>
+     PREFERRED_VERSION_a = "1.1"
+            </literallayout>
+        </para>
+
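The selection rules just described can be sketched as follows. The comparison here is a
naive dotted-number comparison for illustration, not the Debian-style comparison that
BitBake actually performs, and the function name is made up.

     # Sketch: PREFERRED_VERSION wins when set; otherwise the highest version
     # among the available recipes is chosen.
     def pick_version(available, preferred=None):
         if preferred and preferred in available:
             return preferred
         return max(available, key=lambda v: [int(p) for p in v.split(".")])

     if __name__ == "__main__":
         print(pick_version(["1.1", "1.2"]))                    # "1.2" by default
         print(pick_version(["1.1", "1.2"], preferred="1.1"))   # PREFERRED_VERSION_a = "1.1"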
+        <note>
+            <para>
+                It is common for a recipe to provide two versions -- a stable,
+                numbered (and preferred) version, and a version that is
+                automatically checked out from a source code repository that
+                is considered more "bleeding edge" but can be selected only
+                explicitly.
+            </para>
+
+            <para>
+                For example, in the OpenEmbedded codebase, there is a standard,
+                versioned recipe file for BusyBox,
+                <filename>busybox_1.22.1.bb</filename>,
+                but there is also a Git-based version,
+                <filename>busybox_git.bb</filename>, which explicitly contains the line
+                <literallayout class='monospaced'>
+    DEFAULT_PREFERENCE = "-1"
+                </literallayout>
+                to ensure that the numbered, stable version is always preferred
+                unless the developer selects otherwise.
+            </para>
+        </note>
+    </section>
+
+    <section id='bb-bitbake-dependencies'>
+        <title>Dependencies</title>
+
+        <para>
+            Each target BitBake builds consists of multiple tasks such as
+            <filename>fetch</filename>, <filename>unpack</filename>,
+            <filename>patch</filename>, <filename>configure</filename>,
+            and <filename>compile</filename>.
+            For best performance on multi-core systems, BitBake considers each
+            task as an independent
+            entity with its own set of dependencies.
+        </para>
+
+        <para>
+            Dependencies are defined through several variables.
+            You can find information about variables BitBake uses in
+            the <link linkend='ref-variables-glos'>Variables Glossary</link>
+            near the end of this manual.
+            At a basic level, it is sufficient to know that BitBake uses the
+            <link linkend='var-DEPENDS'><filename>DEPENDS</filename></link> and
+            <link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link> variables when
+            calculating dependencies.
+        </para>
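+
+        <para>
+            As a minimal sketch, a hypothetical recipe might declare a
+            build-time and a runtime dependency as follows.
+            The names "somelibrary" and "someutility" are placeholders,
+            and the <filename>_${PN}</filename> suffix on
+            <filename>RDEPENDS</filename> follows the OpenEmbedded
+            convention of declaring runtime dependencies per package:
+            <literallayout class='monospaced'>
+     DEPENDS = "somelibrary"
+     RDEPENDS_${PN} = "someutility"
+            </literallayout>
+        </para>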
+
+        <para>
+            For more information on how BitBake handles dependencies, see the
+            "<link linkend='dependencies'>Dependencies</link>" section.
+        </para>
+    </section>
+
+    <section id='ref-bitbake-tasklist'>
+        <title>The Task List</title>
+
+        <para>
+            Based on the generated list of providers and the dependency information,
+            BitBake can now calculate exactly what tasks it needs to run and in what
+            order it needs to run them.
+            The
+            "<link linkend='executing-tasks'>Executing Tasks</link>" section has more
+            information on how BitBake chooses which task to execute next.
+        </para>
+
+        <para>
+            The build now starts with BitBake forking off threads up to the limit set in the
+            <link linkend='var-BB_NUMBER_THREADS'><filename>BB_NUMBER_THREADS</filename></link>
+            variable.
+            BitBake continues to fork threads as long as there are tasks ready to run,
+            those tasks have all their dependencies met, and the thread threshold has not been
+            exceeded.
+        </para>
+
+        <para>
+            It is worth noting that you can greatly speed up the build time by properly setting
+            the <filename>BB_NUMBER_THREADS</filename> variable.
+        </para>
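+
+        <para>
+            For example, on a machine with four cores you might add the
+            following to a configuration file that BitBake parses
+            (the value "4" is simply an illustration; tune it to your
+            hardware):
+            <literallayout class='monospaced'>
+     BB_NUMBER_THREADS = "4"
+            </literallayout>
+        </para>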
+
+        <para>
+            As each task completes, a timestamp is written to the directory specified by the
+            <link linkend='var-STAMP'><filename>STAMP</filename></link> variable.
+            On subsequent runs, BitBake looks in the build directory within
+            <filename>tmp/stamps</filename> and does not rerun
+            tasks that are already completed unless a timestamp is found to be invalid.
+            Currently, invalid timestamps are only considered on a per
+            recipe file basis.
+            So, for example, if the configure stamp has a timestamp greater than the
+            compile timestamp for a given target, then the compile task would rerun.
+            Running the compile task again, however, has no effect on other providers
+            that depend on that target.
+        </para>
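+
+        <para>
+            The value of <filename>STAMP</filename> itself is supplied by
+            the metadata.
+            A hypothetical definition might look like the following; the
+            actual default depends on your configuration files:
+            <literallayout class='monospaced'>
+     STAMP = "${TMPDIR}/stamps/${PN}-${PV}-${PR}"
+            </literallayout>
+        </para>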
+
+        <para>
+            The exact format of the stamps is partly configurable.
+            In modern versions of BitBake, a hash is appended to the
+            stamp so that if the configuration changes, the stamp becomes
+            invalid and the task is automatically rerun.
+            This hash, or signature used, is governed by the signature policy
+            that is configured (see the
+            "<link linkend='checksums'>Checksums (Signatures)</link>"
+            section for information).
+            It is also possible to append extra metadata to the stamp using
+            the "stamp-extra-info" task flag.
+            For example, OpenEmbedded uses this flag to make some tasks machine-specific.
+        </para>
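+
+        <para>
+            As a sketch, flagging a hypothetical task so that its stamp
+            includes the machine name might look like the following
+            (<filename>do_sometask</filename> is a placeholder and
+            <filename>MACHINE</filename> is an OpenEmbedded variable):
+            <literallayout class='monospaced'>
+     do_sometask[stamp-extra-info] = "${MACHINE}"
+            </literallayout>
+        </para>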
+
+        <note>
+            Some tasks are marked as "nostamp" tasks.
+            No timestamp file is created when these tasks are run.
+            Consequently, "nostamp" tasks are always rerun.
+        </note>
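+
+        <para>
+            Marking a hypothetical task as "nostamp" uses the same task
+            flag syntax:
+            <literallayout class='monospaced'>
+     do_sometask[nostamp] = "1"
+            </literallayout>
+        </para>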
+
+        <para>
+            For more information on tasks, see the
+            "<link linkend='tasks'>Tasks</link>" section.
+        </para>
+    </section>
+
+    <section id='executing-tasks'>
+        <title>Executing Tasks</title>
+
+        <para>
+            A task can be either a shell task or a Python task.
+            For shell tasks, BitBake writes a shell script to
+            <filename>${</filename><link linkend='var-T'><filename>T</filename></link><filename>}/run.do_taskname.pid</filename>
+            and then executes the script.
+            The generated shell script contains all the exported variables,
+            and the shell functions with all variables expanded.
+            Output from the shell script goes to the file
+            <filename>${T}/log.do_taskname.pid</filename>.
+            Looking at the expanded shell functions in the run file and
+            the output in the log files is a useful debugging technique.
+        </para>
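+
+        <para>
+            For illustration, a minimal shell task might be defined and
+            registered as follows (the task name and body are
+            placeholders):
+            <literallayout class='monospaced'>
+     do_printhello() {
+         echo "hello from ${PN}"
+     }
+     addtask printhello after do_fetch before do_build
+            </literallayout>
+        </para>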
+
+        <para>
+            For Python tasks, BitBake executes the task internally and logs
+            information to the controlling terminal.
+            Future versions of BitBake will write the functions to files
+            similar to the way shell tasks are handled.
+            Logging will be handled in a way similar to shell tasks as well.
+        </para>
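+
+        <para>
+            A simple Python task, again with a placeholder name, might
+            look like the following:
+            <literallayout class='monospaced'>
+     python do_printdate() {
+         import time
+         bb.note("Build date: %s" % time.asctime())
+     }
+     addtask printdate after do_fetch before do_build
+            </literallayout>
+        </para>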
+
+        <para>
+            The order in which BitBake runs the tasks is controlled by its
+            task scheduler.
+            It is possible to configure the scheduler and define custom
+            implementations for specific use cases.
+            For more information, see these variables that control the
+            behavior:
+            <itemizedlist>
+                <listitem><para>
+                    <link linkend='var-BB_SCHEDULER'><filename>BB_SCHEDULER</filename></link>
+                    </para></listitem>
+                <listitem><para>
+                    <link linkend='var-BB_SCHEDULERS'><filename>BB_SCHEDULERS</filename></link>
+                    </para></listitem>
+            </itemizedlist>
+            It is possible to have functions run before and after a task's main
+            function.
+            This is done using the "prefuncs" and "postfuncs" flags of the task
+            that lists the functions to run.
+        </para>
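+
+        <para>
+            As a sketch, assuming a hypothetical task
+            <filename>do_mytask</filename>, a function can be attached to
+            run before it as follows:
+            <literallayout class='monospaced'>
+     python do_mytask_prepare() {
+         bb.note("about to run do_mytask")
+     }
+     do_mytask[prefuncs] += "do_mytask_prepare"
+            </literallayout>
+            The "postfuncs" flag works the same way for functions that
+            run after the task's main function.
+        </para>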
+    </section>
+
+    <section id='checksums'>
+        <title>Checksums (Signatures)</title>
+
+        <para>
+            A checksum is a unique signature of a task's inputs.
+            The signature of a task can be used to determine if a task
+            needs to be run.
+            Because it is a change in a task's inputs that triggers running
+            the task, BitBake needs to detect all the inputs to a given task.
+            For shell tasks, this turns out to be fairly easy because
+            BitBake generates a "run" shell script for each task and
+            it is possible to create a checksum that gives you a good idea of when
+            the task's data changes.
+        </para>
+
+        <para>
+            To complicate the problem, some things should not be included in
+            the checksum.
+            First, there is the actual specific build path of a given task -
+            the working directory.
+            It does not matter if the working directory changes because it should not
+            affect the output for target packages.
+            The simplistic approach for excluding the working directory is to set
+            it to some fixed value and create the checksum for the "run" script.
+            BitBake goes one step better and uses the
+            <link linkend='var-BB_HASHBASE_WHITELIST'><filename>BB_HASHBASE_WHITELIST</filename></link>
+            variable to define a list of variables that should never be included
+            when generating the signatures.
+        </para>
+
+        <para>
+            Another problem results from the "run" scripts containing functions that
+            might or might not get called.
+            The incremental build solution contains code that figures out dependencies
+            between shell functions.
+            This code is used to prune the "run" scripts down to the minimum set,
+            thereby alleviating this problem and making the "run" scripts much more
+            readable as a bonus.
+        </para>
+
+        <para>
+            So far we have solutions for shell scripts.
+            What about Python tasks?
+            The same approach applies even though these tasks are more difficult.
+            The process needs to figure out what variables a Python function accesses
+            and what functions it calls.
+            Again, the incremental build solution contains code that first figures out
+            the variable and function dependencies, and then creates a checksum for the data
+            used as the input to the task.
+        </para>
+
+        <para>
+            Like the working directory case, situations exist where dependencies
+            should be ignored.
+            For these cases, you can instruct the build process to ignore a dependency
+            by using a line like the following:
+            <literallayout class='monospaced'>
+     PACKAGE_ARCHS[vardepsexclude] = "MACHINE"
+            </literallayout>
+            This example ensures that the <filename>PACKAGE_ARCHS</filename> variable does not
+            depend on the value of <filename>MACHINE</filename>, even if it does reference it.
+        </para>
+
+        <para>
+            Equally, there are cases where we need to add dependencies BitBake
+            is not able to find.
+            You can accomplish this by using a line like the following:
+            <literallayout class='monospaced'>
+      PACKAGE_ARCHS[vardeps] = "MACHINE"
+            </literallayout>
+            This example explicitly adds the <filename>MACHINE</filename> variable as a
+            dependency for <filename>PACKAGE_ARCHS</filename>.
+        </para>
+
+        <para>
+            Consider a case with in-line Python, for example, where BitBake is not
+            able to figure out dependencies.
+            When running in debug mode (i.e. using <filename>-DDD</filename>), BitBake
+            produces output when it discovers something for which it cannot figure out
+            dependencies.
+        </para>
+
+        <para>
+            Thus far, this section has limited discussion to the direct inputs into a task.
+            Information based on direct inputs is referred to as the "basehash" in the
+            code.
+            However, there is still the question of a task's indirect inputs - the
+            things that were already built and present in the build directory.
+            The checksum (or signature) for a particular task needs to add the hashes
+            of all the tasks on which the particular task depends.
+            Choosing which dependencies to add is a policy decision.
+            However, the effect is to generate a master checksum that combines the basehash
+            and the hashes of the task's dependencies.
+        </para>
+
+        <para>
+            At the code level, there are a variety of ways both the basehash and the
+            dependent task hashes can be influenced.
+            Within the BitBake configuration file, we can give BitBake some extra information
+            to help it construct the basehash.
+            The following statement effectively results in a list of global variable
+            dependency excludes - variables never included in any checksum.
+            This example uses variables from OpenEmbedded to help illustrate
+            the concept:
+            <literallayout class='monospaced'>
+     BB_HASHBASE_WHITELIST ?= "TMPDIR FILE PATH PWD BB_TASKHASH BBPATH DL_DIR \
+         SSTATE_DIR THISDIR FILESEXTRAPATHS FILE_DIRNAME HOME LOGNAME SHELL TERM \
+         USER FILESPATH STAGING_DIR_HOST STAGING_DIR_TARGET COREBASE PRSERV_HOST \
+         PRSERV_DUMPDIR PRSERV_DUMPFILE PRSERV_LOCKDOWN PARALLEL_MAKE \
+         CCACHE_DIR EXTERNAL_TOOLCHAIN CCACHE CCACHE_DISABLE LICENSE_PATH SDKPKGSUFFIX"
+            </literallayout>
+            The previous example excludes the work directory, which is part of
+            <filename>TMPDIR</filename>.
+        </para>
+
+        <para>
+            The rules for deciding which hashes of dependent tasks to include through
+            dependency chains are more complex and are generally accomplished with a
+            Python function.
+            The code in <filename>meta/lib/oe/sstatesig.py</filename> shows two examples
+            of this and also illustrates how you can insert your own policy into the system
+            if so desired.
+            This file defines the two basic signature generators OpenEmbedded Core
+            uses:  "OEBasic" and "OEBasicHash".
+            By default, there is a dummy "noop" signature handler enabled in BitBake.
+            This means that behavior is unchanged from previous versions.
+            <filename>OE-Core</filename> uses the "OEBasicHash" signature handler by default
+            through this setting in the <filename>bitbake.conf</filename> file:
+            <literallayout class='monospaced'>
+     BB_SIGNATURE_HANDLER ?= "OEBasicHash"
+            </literallayout>
+            The "OEBasicHash" <filename>BB_SIGNATURE_HANDLER</filename> is the same as the
+            "OEBasic" version but adds the task hash to the stamp files.
+            As a result, any metadata change that alters the task hash
+            automatically causes the task to be run again.
+            This removes the need to bump
+            <link linkend='var-PR'><filename>PR</filename></link>
+            values, and changes to metadata automatically ripple across the build.
+        </para>
+
+        <para>
+            It is also worth noting that the end result of these signature generators is to
+            make some dependency and hash information available to the build.
+            This information includes:
+            <itemizedlist>
+                <listitem><para><filename>BB_BASEHASH_task-</filename><replaceable>taskname</replaceable>:
+                    The base hashes for each task in the recipe.
+                    </para></listitem>
+                <listitem><para><filename>BB_BASEHASH_</filename><replaceable>filename</replaceable><filename>:</filename><replaceable>taskname</replaceable>:
+                    The base hashes for each dependent task.
+                    </para></listitem>
+                <listitem><para><filename>BBHASHDEPS_</filename><replaceable>filename</replaceable><filename>:</filename><replaceable>taskname</replaceable>:
+                    The task dependencies for each task.
+                    </para></listitem>
+                <listitem><para><filename>BB_TASKHASH</filename>:
+                    The hash of the currently running task.
+                    </para></listitem>
+            </itemizedlist>
+        </para>
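+
+        <para>
+            For example, a Python task with a placeholder name could log
+            the hash of the currently running task like this:
+            <literallayout class='monospaced'>
+     python do_report_taskhash() {
+         bb.note("Task hash is %s" % d.getVar("BB_TASKHASH", True))
+     }
+     addtask report_taskhash after do_compile
+            </literallayout>
+        </para>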
+
+        <para>
+            It is worth noting that BitBake's "-S" option lets you
+            debug BitBake's processing of signatures.
+            The options passed to -S allow different debugging modes
+            to be used, either using BitBake's own debug functions
+            or possibly those defined in the metadata/signature handler
+            itself.
+            The simplest parameter to pass is "none", which causes a
+            set of signature information to be written out into
+            <filename>STAMP_DIR</filename>
+            corresponding to the targets specified.
+            The other currently available parameter is "printdiff",
+            which causes BitBake to try to establish the closest
+            signature match it can (e.g. in the sstate cache) and then
+            run <filename>bitbake-diffsigs</filename> over the matches
+            to determine the stamps and delta where these two
+            stamp trees diverge.
+            <note>
+                It is likely that future versions of BitBake will
+                provide other signature handlers triggered through
+                additional "-S" parameters.
+            </note>
+        </para>
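+
+        <para>
+            For instance, the two currently available modes can be invoked
+            from the command line as follows, where
+            <replaceable>sometarget</replaceable> is a placeholder for the
+            target you are interested in:
+            <literallayout class='monospaced'>
+     $ bitbake -S none <replaceable>sometarget</replaceable>
+     $ bitbake -S printdiff <replaceable>sometarget</replaceable>
+            </literallayout>
+        </para>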
+
+        <para>
+            You can find more information on checksum metadata in the
+            "<link linkend='task-checksums-and-setscene'>Task Checksums and Setscene</link>"
+            section.
+        </para>
+    </section>
+
+    <section id='setscene'>
+        <title>Setscene</title>
+
+        <para>
+            The setscene process enables BitBake to handle "pre-built" artifacts.
+            The ability to handle and reuse these artifacts allows BitBake
+            the luxury of not having to build something from scratch every time.
+            Instead, BitBake can use, when possible, existing build artifacts.
+        </para>
+
+        <para>
+            BitBake needs to have reliable data indicating whether or not an
+            artifact is compatible.
+            Signatures, described in the previous section, provide an ideal
+            way of representing whether an artifact is compatible.
+            If a signature is the same, an object can be reused.
+        </para>
+
+        <para>
+            If an object can be reused, the problem then becomes how to
+            replace a given task or set of tasks with the pre-built artifact.
+            BitBake solves the problem with the "setscene" process.
+        </para>
+
+        <para>
+            When BitBake is asked to build a given target, before building anything,
+            it first asks whether cached information is available for any of the
+            targets it's building, or any of the intermediate targets.
+            If cached information is available, BitBake uses this information instead of
+            running the main tasks.
+        </para>
+
+        <para>
+            BitBake first calls the function defined by the
+            <link linkend='var-BB_HASHCHECK_FUNCTION'><filename>BB_HASHCHECK_FUNCTION</filename></link>
+            variable with a list of tasks and corresponding
+            hashes it wants to build.
+            This function is designed to be fast and returns a list
+            of the tasks for which it believes it can obtain artifacts.
+        </para>
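+
+        <para>
+            The following is a heavily simplified sketch of such a
+            function.
+            The function name is a placeholder, the <filename>def</filename>
+            would normally live in a class file or Python library that
+            BitBake parses, and the argument list reflects how BitBake
+            invokes the hash check function.
+            A real implementation, such as the one OpenEmbedded uses for
+            its shared state cache, would query an artifact store and
+            return the indices of the tasks whose artifacts it can
+            provide:
+            <literallayout class='monospaced'>
+     BB_HASHCHECK_FUNCTION = "my_hash_check"
+
+     def my_hash_check(sq_fn, sq_task, sq_hash, sq_hashfn, d):
+         # Pretend no pre-built artifacts exist, so every main task runs.
+         return []
+            </literallayout>
+        </para>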
+
+        <para>
+            Next, for each of the tasks that were returned as possibilities,
+            BitBake executes a setscene version of the task that the possible
+            artifact covers.
+            Setscene versions of a task have the string "_setscene" appended to the
+            task name.
+            So, for example, the task with the name <filename>xxx</filename> has
+            a setscene task named <filename>xxx_setscene</filename>.
+            The setscene version of the task executes and provides the necessary
+            artifacts, returning either success or failure.
+        </para>
+
+        <para>
+            As previously mentioned, an artifact can cover more than one task.
+            For example, it is pointless to obtain a compiler if you
+            already have the compiled binary.
+            To handle this, BitBake calls the
+            <link linkend='var-BB_SETSCENE_DEPVALID'><filename>BB_SETSCENE_DEPVALID</filename></link>
+            function for each successful setscene task to know whether or not it needs
+            to obtain the dependencies of that task.
+        </para>
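+
+        <para>
+            In OpenEmbedded-Core, for example, this hook is wired into the
+            shared state code with a setting along the lines of:
+            <literallayout class='monospaced'>
+     BB_SETSCENE_DEPVALID = "setscene_depvalid"
+            </literallayout>
+        </para>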
+
+        <para>
+            Finally, after all the setscene tasks have executed, BitBake calls the
+            function listed in
+            <link linkend='var-BB_SETSCENE_VERIFY_FUNCTION'><filename>BB_SETSCENE_VERIFY_FUNCTION</filename></link>
+            with the list of tasks BitBake thinks have been "covered".
+            The metadata can then ensure that this list is correct and can
+            inform BitBake that it wants specific tasks to be run regardless
+            of the setscene result.
+        </para>
+
+        <para>
+            You can find more information on setscene metadata in the
+            "<link linkend='task-checksums-and-setscene'>Task Checksums and Setscene</link>"
+            section.
+        </para>
+    </section>
+</chapter>

+ 765 - 0
bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.xml

@@ -0,0 +1,765 @@
+<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
+"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
+
+<chapter>
+<title>File Download Support</title>
+
+    <para>
+        BitBake's fetch module is a standalone piece of library code
+        that deals with the intricacies of downloading source code
+        and files from remote systems.
+        Fetching source code is one of the cornerstones of building software.
+        As such, this module forms an important part of BitBake.
+    </para>
+
+    <para>
+        The current fetch module is called "fetch2" and refers to the
+        fact that it is the second major version of the API.
+        The original version is obsolete and has been removed from the codebase.
+        Thus, in all cases, "fetch" refers to "fetch2" in this
+        manual.
+    </para>
+
+    <section id='the-download-fetch'>
+        <title>The Download (Fetch)</title>
+
+        <para>
+            BitBake takes several steps when fetching source code or files.
+            The fetcher codebase deals with two distinct processes in order:
+            obtaining the files from somewhere (cached or otherwise)
+            and then unpacking those files into a specific location and
+            perhaps in a specific way.
+            Getting and unpacking the files is optionally followed
+            by patching.
+            Patching, however, is not covered by this module.
+        </para>
+
+        <para>
+            The code to execute the first part of this process, a fetch,
+            looks something like the following:
+            <literallayout class='monospaced'>
+     src_uri = (d.getVar('SRC_URI', True) or "").split()
+     fetcher = bb.fetch2.Fetch(src_uri, d)
+     fetcher.download()
+            </literallayout>
+            This code sets up an instance of the fetch class.
+            The instance uses a space-separated list of URLs from the
+            <link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>
+            variable and then calls the <filename>download</filename>
+            method to download the files.
+        </para>
+
+        <para>
+            The instantiation of the fetch class is usually followed by:
+            <literallayout class='monospaced'>
+     rootdir = d.getVar('WORKDIR', True)
+     fetcher.unpack(rootdir)
+            </literallayout>
+            This code unpacks the downloaded files to the location
+            specified by <filename>WORKDIR</filename>.
+            <note>
+                For convenience, the naming in these examples matches
+                the variables used by OpenEmbedded.
+                If you want to see the above code in action, examine
+                the OpenEmbedded class file <filename>base.bbclass</filename>.
+            </note>
+            The <filename>SRC_URI</filename> and <filename>WORKDIR</filename>
+            variables are not hardcoded into the fetcher, since those fetcher
+            methods can be (and are) called with different variable names.
+            In OpenEmbedded for example, the shared state (sstate) code uses
+            the fetch module to fetch the sstate files.
+        </para>
+
+        <para>
+            When the <filename>download()</filename> method is called,
+            BitBake tries to resolve the URLs by looking for source files
+            in a specific search order:
+            <itemizedlist>
+                <listitem><para><emphasis>Pre-mirror Sites:</emphasis>
+                    BitBake first uses pre-mirrors to try and find source files.
+                    These locations are defined using the
+                    <link linkend='var-PREMIRRORS'><filename>PREMIRRORS</filename></link>
+                    variable.
+                    </para></listitem>
+                <listitem><para><emphasis>Source URI:</emphasis>
+                    If pre-mirrors fail, BitBake uses the original URL (e.g. from
+                    <filename>SRC_URI</filename>).
+                    </para></listitem>
+                <listitem><para><emphasis>Mirror Sites:</emphasis>
+                    If fetch failures occur, BitBake next uses mirror locations as
+                    defined by the
+                    <link linkend='var-MIRRORS'><filename>MIRRORS</filename></link>
+                    variable.
+                    </para></listitem>
+            </itemizedlist>
+        </para>
+
+        <para>
+            For each URL passed to the fetcher, the fetcher
+            calls the submodule that handles that particular URL type.
+            This behavior can be the source of some confusion when you
+            are providing URLs for the <filename>SRC_URI</filename>
+            variable.
+            Consider the following two URLs:
+            <literallayout class='monospaced'>
+     http://git.yoctoproject.org/git/poky;protocol=git
+     git://git.yoctoproject.org/git/poky;protocol=http
+            </literallayout>
+            In the former case, the URL is passed to the
+            <filename>wget</filename> fetcher, which does not
+            understand "git".
+            Therefore, the latter case is the correct form since the
+            Git fetcher does know how to use HTTP as a transport.
+        </para>
+
+        <para>
+            Here are some examples that show commonly used mirror
+            definitions:
+            <literallayout class='monospaced'>
+     PREMIRRORS ?= "\
+         bzr://.*/.*   http://somemirror.org/sources/ \n \
+         cvs://.*/.*   http://somemirror.org/sources/ \n \
+         git://.*/.*   http://somemirror.org/sources/ \n \
+         hg://.*/.*    http://somemirror.org/sources/ \n \
+         osc://.*/.*   http://somemirror.org/sources/ \n \
+         p4://.*/.*    http://somemirror.org/sources/ \n \
+         svn://.*/.*   http://somemirror.org/sources/ \n"
+
+     MIRRORS =+ "\
+         ftp://.*/.*      http://somemirror.org/sources/ \n \
+         http://.*/.*     http://somemirror.org/sources/ \n \
+         https://.*/.*    http://somemirror.org/sources/ \n"
+            </literallayout>
+            It is useful to note that BitBake supports
+            cross-URLs.
+            It is possible to mirror a Git repository on an HTTP
+            server as a tarball.
+            This is what the <filename>git://</filename> mapping in
+            the previous example does.
+        </para>
+
+        <para>
+            Since network accesses are slow, BitBake maintains a
+            cache of files downloaded from the network.
+            Any source files that are not local (i.e.
+            downloaded from the Internet) are placed into the download
+            directory, which is specified by the
+            <link linkend='var-DL_DIR'><filename>DL_DIR</filename></link>
+            variable.
+        </para>
+
+        <para>
+            File integrity is of key importance for reproducing builds.
+            For non-local archive downloads, the fetcher code can verify
+            SHA-256 and MD5 checksums to ensure the archives have been
+            downloaded correctly.
+            You can specify these checksums by using the
+            <filename>SRC_URI</filename> variable with the appropriate
+            varflags as follows:
+            <literallayout class='monospaced'>
+     SRC_URI[md5sum] = "<replaceable>value</replaceable>"
+     SRC_URI[sha256sum] = "<replaceable>value</replaceable>"
+            </literallayout>
+            You can also specify the checksums as parameters on the
+            <filename>SRC_URI</filename> as shown below:
+            <literallayout class='monospaced'>
+     SRC_URI = "http://example.com/foobar.tar.bz2;md5sum=4a8e0f237e961fd7785d19d07fdb994d"
+            </literallayout>
+            If multiple URIs exist, you can specify the checksums either
+            directly as in the previous example, or you can name the URLs.
+            The following syntax shows how you name the URIs:
+            <literallayout class='monospaced'>
+     SRC_URI = "http://example.com/foobar.tar.bz2;name=foo"
+     SRC_URI[foo.md5sum] = "4a8e0f237e961fd7785d19d07fdb994d"
+            </literallayout>
+            After a file has been downloaded and has had its checksum checked,
+            a ".done" stamp is placed in <filename>DL_DIR</filename>.
+            BitBake uses this stamp during subsequent builds to avoid
+            downloading or comparing a checksum for the file again.
+            <note>
+                It is assumed that local storage is safe from data corruption.
+                If this were not the case, there would be bigger issues to worry about.
+            </note>
+        </para>
+
+        <para>
+            If
+            <link linkend='var-BB_STRICT_CHECKSUM'><filename>BB_STRICT_CHECKSUM</filename></link>
+            is set, any download without a checksum triggers an
+            error message.
+            The
+            <link linkend='var-BB_NO_NETWORK'><filename>BB_NO_NETWORK</filename></link>
+            variable can be used to make any attempted network access a fatal
+            error, which is useful for checking that mirrors are complete
+            as well as other things.
+        </para>
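+
+        <para>
+            Both behaviors are enabled from a configuration file that
+            BitBake parses, for example:
+            <literallayout class='monospaced'>
+     BB_STRICT_CHECKSUM = "1"
+     BB_NO_NETWORK = "1"
+            </literallayout>
+        </para>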
+    </section>
+
+    <section id='bb-the-unpack'>
+        <title>The Unpack</title>
+
+        <para>
+            The unpack process usually immediately follows the download.
+            For all URLs except Git URLs, BitBake uses the common
+            <filename>unpack</filename> method.
+        </para>
+
+        <para>
+            A number of parameters exist that you can specify within the
+            URL to govern the behavior of the unpack stage:
+            <itemizedlist>
+                <listitem><para><emphasis>unpack:</emphasis>
+                    Controls whether the URL components are unpacked.
+                    If set to "1", which is the default, the components
+                    are unpacked.
+                    If set to "0", the unpack stage leaves the file alone.
+                    This parameter is useful when you want an archive to be
+                    copied in and not be unpacked.
+                    </para></listitem>
+                <listitem><para><emphasis>dos:</emphasis>
+                    Applies to <filename>.zip</filename> and
+                    <filename>.jar</filename> files and specifies whether to
+                    use DOS line ending conversion on text files.
+                    </para></listitem>
+                <listitem><para><emphasis>basepath:</emphasis>
+                    Instructs the unpack stage to strip the specified
+                    directories from the source path when unpacking.
+                    </para></listitem>
+                <listitem><para><emphasis>subdir:</emphasis>
+                    Unpacks the specific URL to the specified subdirectory
+                    within the root directory.
+                    </para></listitem>
+            </itemizedlist>
+            The unpack call automatically decompresses and extracts files
+            with ".Z", ".z", ".gz", ".xz", ".zip", ".jar", ".ipk", ".rpm".
+            ".srpm", ".deb" and ".bz2" extensions as well as various combinations
+            of tarball extensions.
+        </para>
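+
+        <para>
+            For example, the following hypothetical entry copies an
+            archive into an <filename>archives</filename> subdirectory
+            without unpacking it (the URL is a placeholder):
+            <literallayout class='monospaced'>
+     SRC_URI = "http://example.com/somearchive.tar.gz;unpack=0;subdir=archives"
+            </literallayout>
+        </para>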
+
+        <para>
+            As mentioned, the Git fetcher has its own unpack method that
+            is optimized to work with Git trees.
+            Basically, this method works by cloning the tree into the final
+            directory.
+            The process is completed using references so that there is
+            only one central copy of the Git metadata needed.
+        </para>
+    </section>
+
+    <section id='bb-fetchers'>
+        <title>Fetchers</title>
+
+        <para>
+            As mentioned earlier, the URL prefix determines which
+            fetcher submodule BitBake uses.
+            Each submodule can support different URL parameters,
+            which are described in the following sections.
+        </para>
+
+        <section id='local-file-fetcher'>
+            <title>Local file fetcher (<filename>file://</filename>)</title>
+
+            <para>
+                This submodule handles URLs that begin with
+                <filename>file://</filename>.
+                The filename you specify within the URL can be
+                either an absolute or relative path to a file.
+                If the filename is relative, the contents of the
+                <link linkend='var-FILESPATH'><filename>FILESPATH</filename></link>
+                variable is used in the same way
+                <filename>PATH</filename> is used to find executables.
+                Failing that,
+                <link linkend='var-FILESDIR'><filename>FILESDIR</filename></link>
+                is used to find the appropriate relative file.
+                <note>
+                    <filename>FILESDIR</filename> is deprecated and can
+                    be replaced with <filename>FILESPATH</filename>.
+                    Because <filename>FILESDIR</filename> is likely to be
+                    removed, you should not use this variable in any new code.
+                </note>
+                If the file cannot be found, it is assumed that it is available in
+                <link linkend='var-DL_DIR'><filename>DL_DIR</filename></link>
+                by the time the <filename>download()</filename> method is called.
+            </para>
+
+            <para>
+                If you specify a directory, the entire directory is
+                unpacked.
+            </para>
+
+            <para>
+                Here are a couple of example URLs, the first relative and
+                the second absolute:
+                <literallayout class='monospaced'>
+     SRC_URI = "file://relativefile.patch"
+     SRC_URI = "file:///Users/ich/very_important_software"
+                </literallayout>
+            </para>
+        </section>
+
+        <section id='http-ftp-fetcher'>
+            <title>HTTP/FTP wget fetcher (<filename>http://</filename>, <filename>ftp://</filename>, <filename>https://</filename>)</title>
+
+            <para>
+                This fetcher obtains files from web and FTP servers.
+                Internally, the fetcher uses the wget utility.
+            </para>
+
+            <para>
+                The executable and parameters used are specified by the
+                <filename>FETCHCMD_wget</filename> variable, which defaults
+                to sensible values.
+                The fetcher supports a parameter "downloadfilename" that
+                allows the name of the downloaded file to be specified.
+                Specifying the name of the downloaded file is useful
+                for avoiding collisions in
+                <link linkend='var-DL_DIR'><filename>DL_DIR</filename></link>
+                when dealing with multiple files that have the same name.
+            </para>
+
+            <para>
+                Some example URLs are as follows:
+                <literallayout class='monospaced'>
+     SRC_URI = "http://oe.handhelds.org/not_there.aac"
+     SRC_URI = "ftp://oe.handhelds.org/not_there_as_well.aac"
+     SRC_URI = "ftp://you@oe.handhelds.org/home/you/secret.plan"
+                </literallayout>
+            </para>
+            <note>
+               Because URL parameters are delimited by semi-colons, this can
+               introduce ambiguity when parsing URLs that also contain semi-colons,
+               for example:
+                <literallayout class='monospaced'>
+     SRC_URI = "http://abc123.org/git/?p=gcc/gcc.git;a=snapshot;h=a5dd47"
+                </literallayout>
+               Such URLs should be modified by replacing semi-colons with '&amp;' characters:
+               <literallayout class='monospaced'>
+     SRC_URI = "http://abc123.org/git/?p=gcc/gcc.git&amp;a=snapshot&amp;h=a5dd47"
+                </literallayout>
+                In most cases this should work. Treating semi-colons and '&amp;' in queries
+                identically is recommended by the World Wide Web Consortium (W3C).
+                Note that due to the nature of the URL, you may have to specify the name
+                of the downloaded file as well:
+              <literallayout class='monospaced'>
+     SRC_URI = "http://abc123.org/git/?p=gcc/gcc.git&amp;a=snapshot&amp;h=a5dd47;downloadfilename=myfile.bz2"
+                </literallayout>
+            </note>
+        </section>
+
+        <section id='cvs-fetcher'>
+            <title>CVS fetcher (<filename>cvs://</filename>)</title>
+
+            <para>
+                This submodule handles checking out files from the
+                CVS version control system.
+                You can configure it using a number of different variables:
+                <itemizedlist>
+                    <listitem><para><emphasis><filename>FETCHCMD_cvs</filename>:</emphasis>
+                        The name of the executable to use when running
+                        the <filename>cvs</filename> command.
+                        This name is usually "cvs".
+                        </para></listitem>
+                    <listitem><para><emphasis><filename>SRCDATE</filename>:</emphasis>
+                        The date to use when fetching the CVS source code.
+                        A special value of "now" causes the checkout to
+                        be updated on every build.
+                        </para></listitem>
+                    <listitem><para><emphasis><link linkend='var-CVSDIR'><filename>CVSDIR</filename></link>:</emphasis>
+                        Specifies where a temporary checkout is saved.
+                        The location is often <filename>DL_DIR/cvs</filename>.
+                        </para></listitem>
+                    <listitem><para><emphasis><filename>CVS_PROXY_HOST</filename>:</emphasis>
+                        The name to use as a "proxy=" parameter to the
+                        <filename>cvs</filename> command.
+                        </para></listitem>
+                    <listitem><para><emphasis><filename>CVS_PROXY_PORT</filename>:</emphasis>
+                        The port number to use as a "proxyport=" parameter to
+                        the <filename>cvs</filename> command.
+                        </para></listitem>
+                </itemizedlist>
+                As well as the standard username and password URL syntax,
+                you can also configure the fetcher with various URL parameters:
+            </para>
+
+            <para>
+                The supported parameters are as follows:
+                <itemizedlist>
+                    <listitem><para><emphasis>"method":</emphasis>
+                        The protocol over which to communicate with the CVS server.
+                        By default, this protocol is "pserver".
+                        If "method" is set to "ext", BitBake examines the
+                        "rsh" parameter and sets <filename>CVS_RSH</filename>.
+                        You can use "dir" for local directories.
+                        </para></listitem>
+                    <listitem><para><emphasis>"module":</emphasis>
+                        Specifies the module to check out.
+                        You must supply this parameter.
+                        </para></listitem>
+                    <listitem><para><emphasis>"tag":</emphasis>
+                        Describes which CVS TAG should be used for
+                        the checkout.
+                        By default, the TAG is empty.
+                        </para></listitem>
+                    <listitem><para><emphasis>"date":</emphasis>
+                        Specifies a date.
+                        If no "date" is specified, the
+                        <link linkend='var-SRCDATE'><filename>SRCDATE</filename></link>
+                        of the configuration is used to check out a specific date.
+                        The special value of "now" causes the checkout to be
+                        updated on every build.
+                        </para></listitem>
+                    <listitem><para><emphasis>"localdir":</emphasis>
+                        Used to rename the module.
+                        Effectively, you are renaming the output directory
+                        to which the module is unpacked.
+                        You are forcing the module into a special
+                        directory relative to
+                        <link linkend='var-CVSDIR'><filename>CVSDIR</filename></link>.
+                        </para></listitem>
+                    <listitem><para><emphasis>"rsh"</emphasis>
+                        Used in conjunction with the "method" parameter.
+                        </para></listitem>
+                    <listitem><para><emphasis>"scmdata":</emphasis>
+                        Causes the CVS metadata to be maintained in the tarball
+                        the fetcher creates when set to "keep".
+                        The tarball is expanded into the work directory.
+                        By default, the CVS metadata is removed.
+                        </para></listitem>
+                    <listitem><para><emphasis>"fullpath":</emphasis>
+                        Controls whether the resulting checkout is at the
+                        module level, which is the default, or is at deeper
+                        paths.
+                        </para></listitem>
+                    <listitem><para><emphasis>"norecurse":</emphasis>
+                        Causes the fetcher to check out only the specified
+                        directory without recursing into any subdirectories.
+                        </para></listitem>
+                    <listitem><para><emphasis>"port":</emphasis>
+                        The port used to connect to the CVS server.
+                        </para></listitem>
+                </itemizedlist>
+                Some example URLs are as follows:
+                <literallayout class='monospaced'>
+     SRC_URI = "cvs://CVSROOT;module=mymodule;tag=some-version;method=ext"
+     SRC_URI = "cvs://CVSROOT;module=mymodule;date=20060126;localdir=usethat"
+                </literallayout>
+            </para>
+        </section>
+
+        <section id='svn-fetcher'>
+            <title>Subversion (SVN) Fetcher (<filename>svn://</filename>)</title>
+
+            <para>
+                This fetcher submodule fetches code from the
+                Subversion source control system.
+                The executable used is specified by
+                <filename>FETCHCMD_svn</filename>, which defaults
+                to "svn".
+                The fetcher's temporary working directory is set by
+                <link linkend='var-SVNDIR'><filename>SVNDIR</filename></link>,
+                which is usually <filename>DL_DIR/svn</filename>.
+            </para>
+
+            <para>
+                The supported parameters are as follows:
+                <itemizedlist>
+                    <listitem><para><emphasis>"module":</emphasis>
+                        The name of the svn module to checkout.
+                        You must provide this parameter.
+                        You can think of this parameter as the top-level
+                        directory of the repository data you want.
+                        </para></listitem>
+                    <listitem><para><emphasis>"protocol":</emphasis>
+                        The protocol to use, which defaults to "svn".
+                        Other options are "svn+ssh" and "rsh".
+                        For "rsh", the "rsh" parameter is also used.
+                        </para></listitem>
+                    <listitem><para><emphasis>"rev":</emphasis>
+                        The revision of the source code to checkout.
+                        </para></listitem>
+                    <listitem><para><emphasis>"date":</emphasis>
+                        The date of the source code to checkout.
+                        Specifying a revision is generally much safer than
+                        specifying a date because revisions do not involve
+                        timezones and are therefore more deterministic.
+                        </para></listitem>
+                    <listitem><para><emphasis>"scmdata":</emphasis>
+                        Causes the “.svn” directories to be available during
+                        compile-time when set to "keep".
+                        By default, these directories are removed.
+                        </para></listitem>
+                    <listitem><para><emphasis>"transportuser":</emphasis>
+                        When required, sets the username for the transport.
+                        By default, this parameter is empty.
+                        The transport username is different than the username
+                        used in the main URL, which is passed to the subversion
+                        command.
+                        </para></listitem>
+                </itemizedlist>
+                Following are two examples using svn:
+                <literallayout class='monospaced'>
+     SRC_URI = "svn://svn.oe.handhelds.org/svn;module=vip;proto=http;rev=667"
+     SRC_URI = "svn://svn.oe.handhelds.org/svn/;module=opie;proto=svn+ssh;date=20060126"
+                </literallayout>
+            </para>
+        </section>
+
+        <section id='git-fetcher'>
+            <title>Git Fetcher (<filename>git://</filename>)</title>
+
+            <para>
+                This fetcher submodule fetches code from the Git
+                source control system.
+                The fetcher works by creating a bare clone of the
+                remote into
+                <link linkend='var-GITDIR'><filename>GITDIR</filename></link>,
+                which is usually <filename>DL_DIR/git2</filename>.
+                This bare clone is then cloned into the work directory during the
+                unpack stage when a specific tree is checked out.
+                This is done using alternates and by reference to
+                minimize the amount of duplicate data on the disk and
+                make the unpack process fast.
+                The executable used can be set with
+                <filename>FETCHCMD_git</filename>.
+            </para>
+
+            <para>
+                This fetcher supports the following parameters:
+                <itemizedlist>
+                    <listitem><para><emphasis>"protocol":</emphasis>
+                        The protocol used to fetch the files.
+                        The default is "git" when a hostname is set.
+                        If a hostname is not set, the Git protocol is "file".
+                        You can also use "http", "https", "ssh" and "rsync".
+                        </para></listitem>
+                    <listitem><para><emphasis>"nocheckout":</emphasis>
+                        Tells the fetcher to not checkout source code when
+                        unpacking when set to "1".
+                        Set this option for the URL where there is a custom
+                        routine to checkout code.
+                        The default is "0".
+                        </para></listitem>
+                    <listitem><para><emphasis>"rebaseable":</emphasis>
+                        Indicates that the upstream Git repository can be rebased.
+                        You should set this parameter to "1" if
+                        revisions can become detached from branches.
+                        In this case, a source mirror tarball is created per
+                        revision, which results in a loss of efficiency.
+                        Rebasing the upstream Git repository could cause the
+                        current revision to disappear from the upstream repository.
+                        This option reminds the fetcher to preserve the local cache
+                        carefully for future use.
+                        The default value for this parameter is "0".
+                        </para></listitem>
+                    <listitem><para><emphasis>"nobranch":</emphasis>
+                        Tells the fetcher to not check the SHA validation
+                        for the branch when set to "1".
+                        The default is "0".
+                        Set this option for the recipe that refers to
+                        the commit that is valid for a tag instead of
+                        the branch.
+                        </para></listitem>
+                    <listitem><para><emphasis>"bareclone":</emphasis>
+                        Tells the fetcher to clone a bare clone into the
+                        destination directory without checking out a working tree.
+                        Only the raw Git metadata is provided.
+                        This parameter implies the "nocheckout" parameter as well.
+                        </para></listitem>
+                    <listitem><para><emphasis>"branch":</emphasis>
+                        The branch(es) of the Git tree to clone.
+                        If unset, this is assumed to be "master".
+                        The number of branch parameters must match the number of
+                        name parameters.
+                        </para></listitem>
+                    <listitem><para><emphasis>"rev":</emphasis>
+                        The revision to use for the checkout.
+                        The default is "master".
+                        </para></listitem>
+                    <listitem><para><emphasis>"tag":</emphasis>
+                        Specifies a tag to use for the checkout.
+                        To correctly resolve tags, BitBake must access the
+                        network.
+                        For that reason, tags are often not used.
+                        As far as Git is concerned, the "tag" parameter behaves
+                        effectively the same as the "rev" parameter.
+                        </para></listitem>
+                    <listitem><para><emphasis>"subpath":</emphasis>
+                        Limits the checkout to a specific subpath of the tree.
+                        By default, the whole tree is checked out.
+                        </para></listitem>
+                    <listitem><para><emphasis>"destsuffix":</emphasis>
+                        The name of the path in which to place the checkout.
+                        By default, the path is <filename>git/</filename>.
+                        </para></listitem>
+                </itemizedlist>
+                Here are some example URLs:
+                <literallayout class='monospaced'>
+     SRC_URI = "git://git.oe.handhelds.org/git/vip.git;tag=version-1"
+     SRC_URI = "git://git.oe.handhelds.org/git/vip.git;protocol=http"
+                </literallayout>
+            </para>
+        </section>
+
+        <section id='gitsm-fetcher'>
+            <title>Git Submodule Fetcher (<filename>gitsm://</filename>)</title>
+
+            <para>
+                This fetcher submodule inherits from the
+                <link linkend='git-fetcher'>Git fetcher</link> and extends
+                that fetcher's behavior by fetching a repository's submodules.
+                <link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>
+                is passed to the Git fetcher as described in the
+                "<link linkend='git-fetcher'>Git Fetcher (<filename>git://</filename>)</link>"
+                section.
+                <note>
+                    <title>Notes and Warnings</title>
+                    <para>
+                        You must clean a recipe when switching between
+                        '<filename>git://</filename>' and
+                        '<filename>gitsm://</filename>' URLs.
+                    </para>
+
+                    <para>
+                        The Git Submodules fetcher is not a complete fetcher
+                        implementation.
+                        The fetcher has known issues where it does not use the
+                        normal source mirroring infrastructure properly.
+                    </para>
+                </note>
+            </para>
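+
+            <para>
+                A hypothetical URL using this fetcher might look like the
+                following:
+                <literallayout class='monospaced'>
+     SRC_URI = "gitsm://git.example.com/proj/somerepo.git;protocol=https;branch=master"
+                </literallayout>
+            </para>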
+        </section>
+
+        <section id='clearcase-fetcher'>
+            <title>ClearCase Fetcher (<filename>ccrc://</filename>)</title>
+
+            <para>
+                This fetcher submodule fetches code from a
+                <ulink url='http://en.wikipedia.org/wiki/Rational_ClearCase'>ClearCase</ulink>
+                repository.
+            </para>
+
+            <para>
+                To use this fetcher, make sure your recipe has proper
+                <link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>,
+                <link linkend='var-SRCREV'><filename>SRCREV</filename></link>, and
+                <link linkend='var-PV'><filename>PV</filename></link> settings.
+                Here is an example:
+                <literallayout class='monospaced'>
+     SRC_URI = "ccrc://cc.example.org/ccrc;vob=/example_vob;module=/example_module"
+     SRCREV = "EXAMPLE_CLEARCASE_TAG"
+     PV = "${@d.getVar("SRCREV", False).replace("/", "+")}"
+                </literallayout>
+                The fetcher uses the <filename>rcleartool</filename> or
+                <filename>cleartool</filename> remote client, depending on
+                which one is available.
+            </para>
+
+            <para>
+                Following are options for the <filename>SRC_URI</filename>
+                statement:
+                <itemizedlist>
+                    <listitem><para><emphasis><filename>vob</filename></emphasis>:
+                        The name of the ClearCase VOB, which must include a
+                        leading "/" character.
+                        This option is required.
+                        </para></listitem>
+                    <listitem><para><emphasis><filename>module</filename></emphasis>:
+                        The module in the selected VOB, which must include a
+                        leading "/" character.
+                        <note>
+                            The <filename>module</filename> and <filename>vob</filename>
+                            options are combined to create the <filename>load</filename> rule in
+                            the view config spec.
+                            As an example, consider the <filename>vob</filename> and 
+                            <filename>module</filename> values from the 
+                            <filename>SRC_URI</filename> statement at the start of this section.
+                            Combining those values results in the following:
+                            <literallayout class='monospaced'>
+     load /example_vob/example_module
+                            </literallayout>
+                        </note>
+                        </para></listitem>
+                    <listitem><para><emphasis><filename>proto</filename></emphasis>:
+                        The protocol, which can be either <filename>http</filename> or
+                        <filename>https</filename>.
+                        </para></listitem>
+                </itemizedlist>
+            </para>
+
+            <para>
+                By default, the fetcher creates a configuration specification.
+                If you want this specification written to an area other than the default,
+                use the <filename>CCASE_CUSTOM_CONFIG_SPEC</filename> variable
+                in your recipe to define where the specification is written.
+                <note>
+                    The <filename>SRCREV</filename> value loses its functionality
+                    if you specify this variable.
+                    However, <filename>SRCREV</filename> is still used to label the
+                    archive after a fetch even though it does not define what is
+                    fetched.
+                </note>
+            </para>
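+            <para>
+                As a sketch, a recipe that supplies its own configuration
+                specification might point the variable at a hypothetical
+                file path:
+                <literallayout class='monospaced'>
+     CCASE_CUSTOM_CONFIG_SPEC = "/path/to/my-config-spec"
+                </literallayout>
+                Keep in mind that <filename>SRCREV</filename> then no longer
+                controls what is fetched, as described in the note above.
+            </para>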
+
+            <para>
+                Here are a couple of other behaviors worth mentioning:
+                <itemizedlist>
+                    <listitem><para>
+                        When using <filename>cleartool</filename>, the system
+                        handles the <filename>cleartool</filename> login.
+                        The login requires no special steps.
+                        </para></listitem>
+                    <listitem><para>
+                        To use <filename>rcleartool</filename> with authenticated
+                        users, you must run "rcleartool login" before using the fetcher.
+                        </para></listitem>
+                </itemizedlist>
+            </para>
+        </section>
+
+        <section id='other-fetchers'>
+            <title>Other Fetchers</title>
+
+            <para>
+                Fetch submodules also exist for the following:
+                <itemizedlist>
+                    <listitem><para>
+                        Bazaar (<filename>bzr://</filename>)
+                        </para></listitem>
+                    <listitem><para>
+                        Perforce (<filename>p4://</filename>)
+                        </para></listitem>
+                    <listitem><para>
+                        Trees using Git Annex (<filename>gitannex://</filename>)
+                        </para></listitem>
+                    <listitem><para>
+                        Secure FTP (<filename>sftp://</filename>)
+                        </para></listitem>
+                    <listitem><para>
+                        Secure Shell (<filename>ssh://</filename>)
+                        </para></listitem>
+                    <listitem><para>
+                        Repo (<filename>repo://</filename>)
+                        </para></listitem>
+                    <listitem><para>
+                        OSC (<filename>osc://</filename>)
+                        </para></listitem>
+                    <listitem><para>
+                        Mercurial (<filename>hg://</filename>)
+                        </para></listitem>
+                </itemizedlist>
+                No documentation currently exists for these lesser-used
+                fetcher submodules.
+                However, you might find the code helpful and readable.
+            </para>
+        </section>
+    </section>
+
+    <section id='auto-revisions'>
+        <title>Auto Revisions</title>
+
+        <para>
+            We need to document <filename>AUTOREV</filename> and
+            <filename>SRCREV_FORMAT</filename> here.
+        </para>
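+        <para>
+            Until that documentation exists, note that one commonly used
+            pattern, shown here only as a sketch, is to set
+            <link linkend='var-SRCREV'><filename>SRCREV</filename></link>
+            to the automatic revision marker so that the fetcher tracks the
+            latest upstream revision:
+            <literallayout class='monospaced'>
+     SRCREV = "${AUTOREV}"
+            </literallayout>
+        </para>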
+    </section>
+</chapter>

+ 505 - 0
bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.xml

@@ -0,0 +1,505 @@
+<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
+"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
+
+<appendix id='hello-world-example'>
+    <title>Hello World Example</title>
+
+    <section id='bitbake-hello-world'>
+        <title>BitBake Hello World</title>
+
+        <para>
+            The simplest example commonly used to demonstrate any new
+            programming language or tool is the
+            "<ulink url="http://en.wikipedia.org/wiki/Hello_world_program">Hello World</ulink>"
+            example.
+            This appendix demonstrates, in tutorial form, Hello
+            World within the context of BitBake.
+            The tutorial describes how to create a new project
+            and the applicable metadata files necessary to allow
+            BitBake to build it.
+        </para>
+    </section>
+
+    <section id='example-obtaining-bitbake'>
+        <title>Obtaining BitBake</title>
+
+        <para>
+            See the
+            "<link linkend='obtaining-bitbake'>Obtaining BitBake</link>"
+            section for information on how to obtain BitBake.
+            Once you have the source code on your machine, the BitBake directory
+            appears as follows:
+            <literallayout class='monospaced'>
+     $ ls -al
+     total 100
+     drwxrwxr-x. 9 wmat wmat  4096 Jan 31 13:44 .
+     drwxrwxr-x. 3 wmat wmat  4096 Feb  4 10:45 ..
+     -rw-rw-r--. 1 wmat wmat   365 Nov 26 04:55 AUTHORS
+     drwxrwxr-x. 2 wmat wmat  4096 Nov 26 04:55 bin
+     drwxrwxr-x. 4 wmat wmat  4096 Jan 31 13:44 build
+     -rw-rw-r--. 1 wmat wmat 16501 Nov 26 04:55 ChangeLog
+     drwxrwxr-x. 2 wmat wmat  4096 Nov 26 04:55 classes
+     drwxrwxr-x. 2 wmat wmat  4096 Nov 26 04:55 conf
+     drwxrwxr-x. 3 wmat wmat  4096 Nov 26 04:55 contrib
+     -rw-rw-r--. 1 wmat wmat 17987 Nov 26 04:55 COPYING
+     drwxrwxr-x. 3 wmat wmat  4096 Nov 26 04:55 doc
+     -rw-rw-r--. 1 wmat wmat    69 Nov 26 04:55 .gitignore
+     -rw-rw-r--. 1 wmat wmat   849 Nov 26 04:55 HEADER
+     drwxrwxr-x. 5 wmat wmat  4096 Jan 31 13:44 lib
+     -rw-rw-r--. 1 wmat wmat   195 Nov 26 04:55 MANIFEST.in
+     -rw-rw-r--. 1 wmat wmat  2887 Nov 26 04:55 TODO
+            </literallayout>
+        </para>
+
+        <para>
+            At this point, you should have BitBake cloned to
+            a directory that matches the previous listing except for
+            dates and user names.
+        </para>
+    </section>
+
+    <section id='setting-up-the-bitbake-environment'>
+        <title>Setting Up the BitBake Environment</title>
+
+        <para>
+            First, you need to be sure that you can run BitBake.
+            Set your working directory to where your local BitBake
+            files are and run the following command:
+            <literallayout class='monospaced'>
+     $ ./bin/bitbake --version
+     BitBake Build Tool Core version 1.23.0, bitbake version 1.23.0
+            </literallayout>
+            The console output tells you what version you are running.
+        </para>
+
+        <para>
+            The recommended method to run BitBake is from a directory of your
+            choice.
+            To be able to run BitBake from any directory, you need to add the
+            directory containing the executable binary to your shell's
+            environment <filename>PATH</filename> variable.
+            First, look at your current <filename>PATH</filename> variable
+            by entering the following:
+            <literallayout class='monospaced'>
+     $ echo $PATH
+            </literallayout>
+            Next, add the directory location for the BitBake binary to the
+            <filename>PATH</filename>.
+            Here is an example that adds the
+            <filename>/home/scott-lenovo/bitbake/bin</filename> directory
+            to the front of the <filename>PATH</filename> variable:
+            <literallayout class='monospaced'>
+     $ export PATH=/home/scott-lenovo/bitbake/bin:$PATH
+            </literallayout>
+            You should now be able to enter the <filename>bitbake</filename>
+            command from the command line while working from any directory.
+        </para>
+    </section>
+
+    <section id='the-hello-world-example'>
+        <title>The Hello World Example</title>
+
+        <para>
+            The overall goal of this exercise is to build a
+            complete "Hello World" example utilizing task and layer
+            concepts.
+            Because this is how modern projects such as OpenEmbedded and
+            the Yocto Project utilize BitBake, the example
+            provides an excellent starting point for understanding
+            BitBake.
+        </para>
+
+        <para>
+            To help you understand how to use BitBake to build targets,
+            the example starts with nothing but the <filename>bitbake</filename>
+            command, which causes BitBake to fail and report problems.
+            The example progresses by adding pieces to the build to
+            eventually conclude with a working, minimal "Hello World"
+            example.
+        </para>
+
+        <para>
+            While every attempt is made to explain what is happening during
+            the example, the descriptions cannot cover everything.
+            You can find further information throughout this manual.
+            Also, you can actively participate in the
+            <ulink url='http://lists.openembedded.org/mailman/listinfo/bitbake-devel'></ulink>
+            discussion mailing list about the BitBake build tool.
+        </para>
+
+        <note>
+            This example was inspired by and drew heavily from these sources:
+            <itemizedlist>
+                <listitem><para>
+                    <ulink url="http://www.mail-archive.com/yocto@yoctoproject.org/msg09379.html">Mailing List post - The BitBake equivalent of "Hello, World!"</ulink>
+                    </para></listitem>
+                <listitem><para>
+                    <ulink url="http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/">Hambedded Linux blog post - From Bitbake Hello World to an Image</ulink>
+                    </para></listitem>
+            </itemizedlist>
+        </note>
+
+        <para>
+            As stated earlier, the goal of this example
+            is to eventually compile "Hello World".
+            However, it is unknown what BitBake needs and what you have
+            to provide in order to achieve that goal.
+            Recall that BitBake utilizes three types of metadata files:
+            <link linkend='configuration-files'>Configuration Files</link>,
+            <link linkend='classes'>Classes</link>, and
+            <link linkend='recipes'>Recipes</link>.
+            But where do they go?
+            How does BitBake find them?
+            BitBake's error messaging helps you answer these types of questions
+            and helps you better understand exactly what is going on.
+        </para>
+
+        <para>
+            Following is the complete "Hello World" example.
+        </para>
+
+        <orderedlist>
+            <listitem><para><emphasis>Create a Project Directory:</emphasis>
+                First, set up a directory for the "Hello World" project.
+                Here is how you can do so in your home directory:
+                <literallayout class='monospaced'>
+     $ mkdir ~/hello
+     $ cd ~/hello
+                </literallayout>
+                This is the directory that BitBake will use to do all of
+                its work.
+                You can use this directory to keep all the metafiles needed
+                by BitBake.
+                Having a project directory is a good way to isolate your
+                project.
+                </para></listitem>
+            <listitem><para><emphasis>Run BitBake:</emphasis>
+                At this point, you have nothing but a project directory.
+                Run the <filename>bitbake</filename> command and see what
+                it does:
+                <literallayout class='monospaced'>
+     $ bitbake
+     The BBPATH variable is not set and bitbake did not
+     find a conf/bblayers.conf file in the expected location.
+     Maybe you accidentally invoked bitbake from the wrong directory?
+     DEBUG: Removed the following variables from the environment:
+     GNOME_DESKTOP_SESSION_ID, XDG_CURRENT_DESKTOP,
+     GNOME_KEYRING_CONTROL, DISPLAY, SSH_AGENT_PID, LANG, no_proxy,
+     XDG_SESSION_PATH, XAUTHORITY, SESSION_MANAGER, SHLVL,
+     MANDATORY_PATH, COMPIZ_CONFIG_PROFILE, WINDOWID, EDITOR,
+     GPG_AGENT_INFO, SSH_AUTH_SOCK, GDMSESSION, GNOME_KEYRING_PID,
+     XDG_SEAT_PATH, XDG_CONFIG_DIRS, LESSOPEN, DBUS_SESSION_BUS_ADDRESS,
+     _, XDG_SESSION_COOKIE, DESKTOP_SESSION, LESSCLOSE, DEFAULTS_PATH,
+     UBUNTU_MENUPROXY, OLDPWD, XDG_DATA_DIRS, COLORTERM, LS_COLORS
+                </literallayout>
+                The majority of this output is specific to environment variables
+                that are not directly relevant to BitBake.
+                However, the very first message regarding the
+                <filename>BBPATH</filename> variable and the
+                <filename>conf/bblayers.conf</filename> file
+                is relevant.</para>
+                <para>
+                When you run BitBake, it begins looking for metadata files.
+                The
+                <link linkend='var-BBPATH'><filename>BBPATH</filename></link>
+                variable is what tells BitBake where to look for those files.
+                <filename>BBPATH</filename> is not set and you need to set it.
+                Without <filename>BBPATH</filename>, BitBake cannot
+                find any configuration files (<filename>.conf</filename>)
+                or recipe files (<filename>.bb</filename>) at all.
+                BitBake also cannot find the <filename>bitbake.conf</filename>
+                file.
+                </para></listitem>
+            <listitem><para><emphasis>Setting <filename>BBPATH</filename>:</emphasis>
+                For this example, you can set <filename>BBPATH</filename>
+                in the same manner that you set <filename>PATH</filename>
+                earlier in the appendix.
+                You should realize, though, that it is much more flexible to
+                set up the <filename>BBPATH</filename> variable in a
+                configuration file for each project.</para>
+                <para>From your shell, enter the following commands to set and
+                export the <filename>BBPATH</filename> variable:
+                <literallayout class='monospaced'>
+     $ BBPATH="<replaceable>projectdirectory</replaceable>"
+     $ export BBPATH
+                </literallayout>
+                Use your actual project directory in the command.
+                BitBake uses that directory to find the metadata it needs for
+                your project.
+                <note>
+                    When specifying your project directory, do not use the
+                    tilde ("~") character as BitBake does not expand that character
+                    as the shell would.
+                </note>
+                </para></listitem>
+            <listitem><para><emphasis>Run BitBake:</emphasis>
+                Now that you have <filename>BBPATH</filename> defined, run
+                the <filename>bitbake</filename> command again:
+                <literallayout class='monospaced'>
+     $ bitbake
+     ERROR: Traceback (most recent call last):
+       File "/home/scott-lenovo/bitbake/lib/bb/cookerdata.py", line 163, in wrapped
+         return func(fn, *args)
+       File "/home/scott-lenovo/bitbake/lib/bb/cookerdata.py", line 173, in parse_config_file
+         return bb.parse.handle(fn, data, include)
+       File "/home/scott-lenovo/bitbake/lib/bb/parse/__init__.py", line 99, in handle
+         return h['handle'](fn, data, include)
+       File "/home/scott-lenovo/bitbake/lib/bb/parse/parse_py/ConfHandler.py", line 120, in handle
+         abs_fn = resolve_file(fn, data)
+       File "/home/scott-lenovo/bitbake/lib/bb/parse/__init__.py", line 117, in resolve_file
+         raise IOError("file %s not found in %s" % (fn, bbpath))
+     IOError: file conf/bitbake.conf not found in /home/scott-lenovo/hello
+
+     ERROR: Unable to parse conf/bitbake.conf: file conf/bitbake.conf not found in /home/scott-lenovo/hello
+                </literallayout>
+                This sample output shows that BitBake could not find the
+                <filename>conf/bitbake.conf</filename> file in the project
+                directory.
+                This file is the first thing BitBake must find in order
+                to build a target.
+                And, since the project directory for this example is
+                empty, you need to provide a <filename>conf/bitbake.conf</filename>
+                file.
+                </para></listitem>
+            <listitem><para><emphasis>Creating <filename>conf/bitbake.conf</filename>:</emphasis>
+                The <filename>conf/bitbake.conf</filename> file includes a number of
+                configuration variables BitBake uses for metadata and recipe
+                files.
+                For this example, you need to create the file in your project directory
+                and define some key BitBake variables.
+                For more information on the <filename>bitbake.conf</filename> file,
+                see
+                <ulink url='http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/#an-overview-of-bitbakeconf'></ulink>.
+                </para>
+                <para>Use the following command to create the <filename>conf</filename>
+                directory in the project directory:
+                <literallayout class='monospaced'>
+     $ mkdir conf
+                </literallayout>
+                From within the <filename>conf</filename> directory, use
+                some editor to create the <filename>bitbake.conf</filename>
+                so that it contains the following:
+                <literallayout class='monospaced'>
+     TMPDIR  = "${<link linkend='var-TOPDIR'>TOPDIR</link>}/tmp"
+     <link linkend='var-CACHE'>CACHE</link>   = "${TMPDIR}/cache"
+     <link linkend='var-STAMP'>STAMP</link>   = "${TMPDIR}/stamps"
+     <link linkend='var-T'>T</link>       = "${TMPDIR}/work"
+     <link linkend='var-B'>B</link>       = "${TMPDIR}"
+                </literallayout>
+                The <filename>TMPDIR</filename> variable establishes a directory
+                that BitBake uses for build output and intermediate files (other
+                than the cached information used by the
+                <link linkend='setscene'>Setscene</link> process).
+                Here, the <filename>TMPDIR</filename> directory is set to
+                <filename>hello/tmp</filename>.
+                <note><title>Tip</title>
+                    You can always safely delete the <filename>tmp</filename>
+                    directory in order to rebuild a BitBake target.
+                    The build process creates the directory for you
+                    when you run BitBake.
+                </note></para>
+                <para>For information about each of the other variables defined in this
+                example, click on the links to take you to the definitions in
+                the glossary.
+                </para></listitem>
+            <listitem><para><emphasis>Run BitBake:</emphasis>
+                After making sure that the <filename>conf/bitbake.conf</filename>
+                file exists, you can run the <filename>bitbake</filename>
+                command again:
+                <literallayout class='monospaced'>
+$ bitbake
+ERROR: Traceback (most recent call last):
+  File "/home/scott-lenovo/bitbake/lib/bb/cookerdata.py", line 163, in wrapped
+    return func(fn, *args)
+  File "/home/scott-lenovo/bitbake/lib/bb/cookerdata.py", line 177, in _inherit
+    bb.parse.BBHandler.inherit(bbclass, "configuration INHERITs", 0, data)
+  File "/home/scott-lenovo/bitbake/lib/bb/parse/parse_py/BBHandler.py", line 92, in inherit
+    include(fn, file, lineno, d, "inherit")
+  File "/home/scott-lenovo/bitbake/lib/bb/parse/parse_py/ConfHandler.py", line 100, in include
+    raise ParseError("Could not %(error_out)s file %(fn)s" % vars(), oldfn, lineno)
+ParseError: ParseError in configuration INHERITs: Could not inherit file classes/base.bbclass
+
+ERROR: Unable to parse base: ParseError in configuration INHERITs: Could not inherit file classes/base.bbclass
+                </literallayout>
+                In the sample output, BitBake could not find the
+                <filename>classes/base.bbclass</filename> file.
+                You need to create that file next.
+                </para></listitem>
+            <listitem><para><emphasis>Creating <filename>classes/base.bbclass</filename>:</emphasis>
+                BitBake uses class files to provide common code and functionality.
+                The minimally required class for BitBake is the
+                <filename>classes/base.bbclass</filename> file.
+                The <filename>base</filename> class is implicitly inherited by
+                every recipe.
+                BitBake looks for the class in the <filename>classes</filename>
+                directory of the project (i.e. <filename>hello/classes</filename>
+                in this example).
+                </para>
+                <para>Create the <filename>classes</filename> directory as follows:
+                <literallayout class='monospaced'>
+     $ cd $HOME/hello
+     $ mkdir classes
+                </literallayout>
+                Move to the <filename>classes</filename> directory and then
+                create the <filename>base.bbclass</filename> file by inserting
+                this single line:
+                <literallayout class='monospaced'>
+     addtask build
+                </literallayout>
+                The minimal task that BitBake runs is the
+                <filename>do_build</filename> task.
+                This is all the example needs in order to build the project.
+                Of course, the <filename>base.bbclass</filename> can have much
+                more depending on which build environments BitBake is
+                supporting.
+                For more information on the <filename>base.bbclass</filename> file,
+                you can look at
+                <ulink url='http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/#tasks'></ulink>.
+                </para></listitem>
+            <listitem><para><emphasis>Run BitBake:</emphasis>
+                After making sure that the <filename>classes/base.bbclass</filename>
+                file exists, you can run the <filename>bitbake</filename>
+                command again:
+                <literallayout class='monospaced'>
+     $ bitbake
+     Nothing to do.  Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.
+                </literallayout>
+                BitBake is finally reporting no errors.
+                However, you can see that it really does not have anything
+                to do.
+                You need to create a recipe that gives BitBake something to do.
+                </para></listitem>
+            <listitem><para><emphasis>Creating a Layer:</emphasis>
+                While it is not really necessary for such a small example,
+                it is good practice to create a layer in which to keep your
+                code separate from the general metadata used by BitBake.
+                Thus, this example creates and uses a layer called "mylayer".
+                <note>
+                    You can find additional information on adding a layer at
+                    <ulink url='http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/#adding-an-example-layer'></ulink>.
+                </note>
+                </para>
+                <para>Minimally, you need a recipe file and a layer configuration
+                file in your layer.
+                The configuration file needs to be in the <filename>conf</filename>
+                directory inside the layer.
+                Use these commands to set up the layer and the <filename>conf</filename>
+                directory:
+                <literallayout class='monospaced'>
+     $ cd $HOME
+     $ mkdir mylayer
+     $ cd mylayer
+     $ mkdir conf
+                </literallayout>
+                Move to the <filename>conf</filename> directory and create a
+                <filename>layer.conf</filename> file that has the following:
+                <literallayout class='monospaced'>
+     BBPATH .= ":${<link linkend='var-LAYERDIR'>LAYERDIR</link>}"
+
+     <link linkend='var-BBFILES'>BBFILES</link> += "${LAYERDIR}/*.bb"
+
+     <link linkend='var-BBFILE_COLLECTIONS'>BBFILE_COLLECTIONS</link> += "mylayer"
+     <link linkend='var-BBFILE_PATTERN'>BBFILE_PATTERN_mylayer</link> := "^${LAYERDIR}/"
+                </literallayout>
+                For information on these variables, click the links
+                to go to the definitions in the glossary.</para>
+                <para>You need to create the recipe file next.
+                Inside your layer at the top-level, use an editor and create
+                a recipe file named <filename>printhello.bb</filename> that
+                has the following:
+                <literallayout class='monospaced'>
+     <link linkend='var-DESCRIPTION'>DESCRIPTION</link> = "Prints Hello World"
+     <link linkend='var-PN'>PN</link> = 'printhello'
+     <link linkend='var-PV'>PV</link> = '1'
+
+     python do_build() {
+        bb.plain("********************");
+        bb.plain("*                  *");
+        bb.plain("*  Hello, World!   *");
+        bb.plain("*                  *");
+        bb.plain("********************");
+     }
+                </literallayout>
+                The recipe file simply provides a description of the
+                recipe, the name, version, and the <filename>do_build</filename>
+                task, which prints out "Hello World" to the console.
+                For more information on these variables, follow the links
+                to the glossary.
+                </para></listitem>
+            <listitem><para><emphasis>Run BitBake With a Target:</emphasis>
+                Now that a BitBake target exists, run the command and provide
+                that target:
+                <literallayout class='monospaced'>
+     $ cd $HOME/hello
+     $ bitbake printhello
+     ERROR: no recipe files to build, check your BBPATH and BBFILES?
+
+     Summary: There was 1 ERROR message shown, returning a non-zero exit code.
+                </literallayout>
+                We have created the layer with the recipe and the layer
+                configuration file, but it still seems that BitBake cannot
+                find the recipe.
+                BitBake needs a <filename>conf/bblayers.conf</filename> that
+                lists the layers for the project.
+                Without this file, BitBake cannot find the recipe.
+                </para></listitem>
+            <listitem><para><emphasis>Creating <filename>conf/bblayers.conf</filename>:</emphasis>
+                BitBake uses the <filename>conf/bblayers.conf</filename> file
+                to locate layers needed for the project.
+                This file must reside in the <filename>conf</filename> directory
+                of the project (i.e. <filename>hello/conf</filename> for this
+                example).</para>
+                <para>Set your working directory to the <filename>hello/conf</filename>
+                directory and then create the <filename>bblayers.conf</filename>
+                file so that it contains the following:
+                <literallayout class='monospaced'>
+     BBLAYERS ?= " \
+       /home/&lt;you&gt;/mylayer \
+       "
+                </literallayout>
+                You need to provide your own information for
+                <filename>you</filename> in the file.
+                </para></listitem>
+            <listitem><para><emphasis>Run BitBake With a Target:</emphasis>
+                Now that you have supplied the <filename>bblayers.conf</filename>
+                file, run the <filename>bitbake</filename> command and provide
+                the target:
+                <literallayout class='monospaced'>
+     $ bitbake printhello
+     Parsing recipes: 100% |##################################################################################|
+     Time: 00:00:00
+     Parsing of 1 .bb files complete (0 cached, 1 parsed). 1 targets, 0 skipped, 0 masked, 0 errors.
+     NOTE: Resolving any missing task queue dependencies
+     NOTE: Preparing RunQueue
+     NOTE: Executing RunQueue Tasks
+     ********************
+     *                  *
+     *  Hello, World!   *
+     *                  *
+     ********************
+     NOTE: Tasks Summary: Attempted 1 tasks of which 0 didn't need to be rerun and all succeeded.
+                </literallayout>
+                BitBake finds the <filename>printhello</filename> recipe and
+                successfully runs the task.
+                <note>
+                    After the first execution, re-running
+                    <filename>bitbake printhello</filename> again will not
+                    result in a BitBake run that prints the same console
+                    output.
+                    The reason for this is that the first time the
+                    <filename>printhello.bb</filename> recipe's
+                    <filename>do_build</filename> task executes
+                    successfully, BitBake writes a stamp file for the task.
+                    Thus, the next time you attempt to run the task
+                    using that same <filename>bitbake</filename> command,
+                    BitBake notices the stamp and therefore determines
+                    that the task does not need to be re-run.
+                    If you delete the <filename>tmp</filename> directory
+                    or run <filename>bitbake -c clean printhello</filename>
+                    and then re-run the build, the "Hello, World!" message will
+                    be printed again.
+                </note>
+                </para></listitem>
+        </orderedlist>
+    </section>
+</appendix>

+ 685 - 0
bitbake/doc/bitbake-user-manual/bitbake-user-manual-intro.xml

@@ -0,0 +1,685 @@
+<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
+    "http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
+
+<chapter id="bitbake-user-manual-intro">
+    <title>Overview</title>
+
+    <para>
+        Welcome to the BitBake User Manual.
+        This manual provides information on the BitBake tool.
+        The information attempts to be as independent as possible regarding
+        systems that use BitBake, such as OpenEmbedded and the
+        Yocto Project.
+        In some cases, scenarios or examples within the context of
+        a build system are used in the manual to help with understanding.
+        For these cases, the manual clearly states the context.
+    </para>
+
+    <section id="intro">
+        <title>Introduction</title>
+
+        <para>
+            Fundamentally, BitBake is a generic task execution
+            engine that allows shell and Python tasks to be run
+            efficiently and in parallel while working within
+            complex inter-task dependency constraints.
+            One of BitBake's main users, OpenEmbedded, takes this core
+            and builds embedded Linux software stacks using
+            a task-oriented approach.
+        </para>
+
+        <para>
+            Conceptually, BitBake is similar to GNU Make in
+            some regards but has significant differences:
+            <itemizedlist>
+                <listitem><para>
+                    BitBake executes tasks according to provided
+                    metadata that builds up the tasks.
+                    Metadata is stored in recipe (<filename>.bb</filename>)
+                    and related recipe "append" (<filename>.bbappend</filename>)
+                    files, configuration (<filename>.conf</filename>) and
+                    underlying include (<filename>.inc</filename>) files, and
+                    in class (<filename>.bbclass</filename>) files.
+                    The metadata provides
+                    BitBake with instructions on what tasks to run and
+                    the dependencies between those tasks.
+                    </para></listitem>
+                <listitem><para>
+                    BitBake includes a fetcher library for obtaining source
+                    code from various places such as local files, source control
+                    systems, or websites.
+                    </para></listitem>
+                <listitem><para>
+                    The instructions for each unit to be built (e.g. a piece
+                    of software) are known as "recipe" files and
+                    contain all the information about the unit
+                    (dependencies, source file locations, checksums, description
+                    and so on).
+                    </para></listitem>
+                <listitem><para>
+                    BitBake includes a client/server abstraction, can be
+                    used from a command line or as a service over XML-RPC,
+                    and has several different user interfaces.
+                    </para></listitem>
+            </itemizedlist>
+        </para>
+    </section>
+
+    <section id="history-and-goals">
+        <title>History and Goals</title>
+
+        <para>
+            BitBake was originally a part of the OpenEmbedded project.
+            It was inspired by the Portage package management system
+            used by the Gentoo Linux distribution.
+            On December 7, 2004, OpenEmbedded project team member
+            Chris Larson split the project into two distinct pieces:
+            <itemizedlist>
+                <listitem><para>BitBake, a generic task executor</para></listitem>
+                <listitem><para>OpenEmbedded, a metadata set utilized by
+                    BitBake</para></listitem>
+            </itemizedlist>
+            Today, BitBake is the primary basis of the
+            <ulink url="http://www.openembedded.org/">OpenEmbedded</ulink>
+            project, which is being used to build and maintain Linux
+            distributions such as the
+            <ulink url='http://www.angstrom-distribution.org/'>Angstrom Distribution</ulink>,
+            and which is also being used as the build tool for Linux projects
+            such as the
+            <ulink url='http://www.yoctoproject.org'>Yocto Project</ulink>.
+        </para>
+
+        <para>
+            Prior to BitBake, no other build tool adequately met the needs of
+            an aspiring embedded Linux distribution.
+            All of the build systems used by traditional desktop Linux
+            distributions lacked important functionality, and none of the
+            ad hoc Buildroot-based systems, prevalent in the
+            embedded space, were scalable or maintainable.
+        </para>
+
+        <para>
+            Some important original goals for BitBake were:
+            <itemizedlist>
+                <listitem><para>
+                    Handle cross-compilation.
+                    </para></listitem>
+                <listitem><para>
+                    Handle inter-package dependencies (build time on
+                    target architecture, build time on native
+                    architecture, and runtime).
+                    </para></listitem>
+                <listitem><para>
+                    Support running any number of tasks within a given
+                    package, including, but not limited to, fetching
+                    upstream sources, unpacking them, patching them,
+                    configuring them, and so forth.
+                    </para></listitem>
+                <listitem><para>
+                    Be Linux distribution agnostic for both build and
+                    target systems.
+                    </para></listitem>
+                <listitem><para>
+                    Be architecture agnostic.
+                    </para></listitem>
+                <listitem><para>
+                    Support multiple build and target operating systems
+                    (e.g. Cygwin, the BSDs, and so forth).
+                    </para></listitem>
+                <listitem><para>
+                    Be self contained, rather than tightly
+                    integrated into the build machine's root
+                    filesystem.
+                    </para></listitem>
+                <listitem><para>
+                    Handle conditional metadata on the target architecture,
+                    operating system, distribution, and machine.
+                    </para></listitem>
+                <listitem><para>
+                    Make it easy to use the tools to supply local metadata
+                    and packages against which to operate.
+                    </para></listitem>
+                <listitem><para>
+                    Make it easy for multiple projects to collaborate on
+                    their builds using BitBake.
+                    </para></listitem>
+                <listitem><para>
+                    Provide an inheritance mechanism to share
+                    common metadata between many packages.
+                    </para></listitem>
+            </itemizedlist>
+            Over time it became apparent that some further requirements
+            were necessary:
+            <itemizedlist>
+                <listitem><para>
+                    Handle variants of a base recipe (e.g. native, sdk,
+                    and multilib).
+                    </para></listitem>
+                <listitem><para>
+                    Split metadata into layers and allow layers
+                    to enhance or override other layers.
+                    </para></listitem>
+                <listitem><para>
+                    Allow representation of a given set of input variables
+                    to a task as a checksum.
+                    Based on that checksum, allow acceleration of builds
+                    with prebuilt components.
+                    </para></listitem>
+            </itemizedlist>
+            BitBake satisfies all of the original requirements and many more,
+            with extensions made to the basic functionality to reflect the
+            additional requirements.
+            Flexibility and power have always been the priorities.
+            BitBake is highly extensible and supports embedded Python code and
+            execution of any arbitrary tasks.
+        </para>
+    </section>
+
+    <section id="Concepts">
+        <title>Concepts</title>
+
+        <para>
+            BitBake is a program written in the Python language.
+            At the highest level, BitBake interprets metadata, decides
+            what tasks are required to run, and executes those tasks.
+            Similar to GNU Make, BitBake controls how software is
+            built.
+            GNU Make achieves its control through "makefiles", while
+            BitBake uses "recipes".
+        </para>
+
+        <para>
+            BitBake extends the capabilities of a simple
+            tool like GNU Make by allowing for the definition of much more
+            complex tasks, such as assembling entire embedded Linux
+            distributions.
+        </para>
+
+        <para>
+            The remainder of this section introduces several concepts
+            that should be understood in order to better leverage
+            the power of BitBake.
+        </para>
+
+        <section id='recipes'>
+            <title>Recipes</title>
+
+            <para>
+                BitBake Recipes, which are denoted by the file extension
+                <filename>.bb</filename>, are the most basic metadata files.
+                These recipe files provide BitBake with the following:
+                <itemizedlist>
+                    <listitem><para>Descriptive information about the
+                        package (author, homepage, license, and so on)</para></listitem>
+                    <listitem><para>The version of the recipe</para></listitem>
+                    <listitem><para>Existing dependencies (both build
+                        and runtime dependencies)</para></listitem>
+                    <listitem><para>Where the source code resides and
+                        how to fetch it</para></listitem>
+                    <listitem><para>Whether the source code requires
+                        any patches, where to find them, and how to apply
+                        them</para></listitem>
+                    <listitem><para>How to configure and compile the
+                        source code</para></listitem>
+                    <listitem><para>Where on the target machine to install the
+                        package or packages created</para></listitem>
+                </itemizedlist>
+            </para>
+
+            <para>
+                Within the context of BitBake, or any project utilizing BitBake
+                as its build system, files with the <filename>.bb</filename>
+                extension are referred to as recipes.
+                <note>
+                    The term "package" is also commonly used to describe recipes.
+                    However, since the same word is used to describe packaged
+                    output from a project, it is best to maintain a single
+                    descriptive term - "recipes".
+                    Put another way, a single "recipe" file is quite capable
+                    of generating a number of related but separately installable
+                    "packages".
+                    In fact, that ability is fairly common.
+                </note>
+            </para>
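+            <para>
+                As an illustrative sketch only (the names and URL are
+                hypothetical), the descriptive portion of a recipe might look
+                like the following:
+                <literallayout class='monospaced'>
+     DESCRIPTION = "An example application"
+     PN = "example-app"
+     PV = "1.0"
+
+     SRC_URI = "http://example.com/releases/example-app-${PV}.tar.gz"
+                </literallayout>
+                The
+                "<link linkend='hello-world-example'>Hello World Example</link>"
+                appendix walks through creating a complete, minimal recipe
+                from scratch.
+            </para>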
+        </section>
+
+        <section id='configuration-files'>
+            <title>Configuration Files</title>
+
+            <para>
+                Configuration files, which are denoted by the
+                <filename>.conf</filename> extension, define
+                various configuration variables that govern the project's build
+                process.
+                These files fall into several areas that define
+                machine configuration options, distribution configuration
+                options, compiler tuning options, general common
+                configuration options, and user configuration options.
+                The main configuration file is the sample
+                <filename>bitbake.conf</filename> file, which is
+                located within the BitBake source tree
+                <filename>conf</filename> directory.
+            </para>
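+            <para>
+                As a brief sketch, a configuration file simply assigns values
+                to variables; the assignments below are hypothetical:
+                <literallayout class='monospaced'>
+     TMPDIR = "${TOPDIR}/tmp"
+     DL_DIR = "${TOPDIR}/downloads"
+                </literallayout>
+                The
+                "<link linkend='hello-world-example'>Hello World Example</link>"
+                appendix shows a small but working
+                <filename>bitbake.conf</filename> file built from assignments
+                such as these.
+            </para>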
+        </section>
+
+        <section id='classes'>
+            <title>Classes</title>
+
+            <para>
+                Class files, which are denoted by the
+                <filename>.bbclass</filename> extension, contain
+                information that is useful to share between metadata files.
+                The BitBake source tree currently comes with one class metadata file
+                called <filename>base.bbclass</filename>.
+                You can find this file in the
+                <filename>classes</filename> directory.
+                The <filename>base.bbclass</filename> class file is special since it
+                is always included automatically for all recipes
+                and classes.
+                This class contains definitions for standard basic tasks such
+                as fetching, unpacking, configuring (empty by default),
+                compiling (runs any Makefile present), installing (empty by
+                default) and packaging (empty by default).
+                These tasks are often overridden or extended by other classes
+                added during the project development process.
+            </para>
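+            <para>
+                As a minimal sketch of what a class can provide, the following
+                hypothetical class adds a task and supplies a default shell
+                implementation that inheriting recipes would share:
+                <literallayout class='monospaced'>
+     # example.bbclass (hypothetical)
+     addtask build
+
+     do_build() {
+         echo "building ${PN}"
+     }
+                </literallayout>
+            </para>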
+        </section>
+
+        <section id='layers'>
+            <title>Layers</title>
+
+            <para>
+                Layers allow you to isolate different types of
+                customizations from each other.
+                While you might find it tempting to keep everything in one layer
+                when working on a single project, the more modular you organize
+                your metadata, the easier it is to cope with future changes.
+            </para>
+
+            <para>
+                To illustrate how you can use layers to keep things modular,
+                consider customizations you might make to support a specific target machine.
+                These types of customizations typically reside in a special layer,
+                rather than a general layer, called a Board Support Package (BSP)
+                Layer.
+                Furthermore, the machine customizations should be isolated from
+                recipes and metadata that support a new GUI environment, for
+                example.
+                This situation gives you a couple of layers: one for the machine
+                configurations and one for the GUI environment.
+                It is important to understand, however, that the BSP layer can still
+                make machine-specific additions to recipes within
+                the GUI environment layer without polluting the GUI layer itself
+                with those machine-specific changes.
+                You can accomplish this through a recipe that is a BitBake append
+                (<filename>.bbappend</filename>) file.
+            </para>
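+            <para>
+                As a sketch using hypothetical layer names, a project that
+                uses both layers would simply list them in its
+                <filename>conf/bblayers.conf</filename> file:
+                <literallayout class='monospaced'>
+     BBLAYERS ?= " \
+       /home/user/meta-example-bsp \
+       /home/user/meta-example-gui \
+       "
+                </literallayout>
+            </para>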
+        </section>
+
+        <section id='append-bbappend-files'>
+            <title>Append Files</title>
+
+            <para>
+                Append files, which are files that have the
+                <filename>.bbappend</filename> file extension, extend or
+                override information in an existing recipe file.
+            </para>
+
+            <para>
+                BitBake expects every append file to have a corresponding recipe file.
+                Furthermore, the append file and corresponding recipe file
+                must use the same root filename.
+                The filenames can differ only in the file type suffix used
+                (e.g. <filename>formfactor_0.0.bb</filename> and
+                <filename>formfactor_0.0.bbappend</filename>).
+            </para>
+
+            <para>
+                Information in append files extends or
+                overrides the information in the underlying,
+                similarly-named recipe files.
+            </para>
+
+            <para>
+                When you name an append file, you can use the
+                wildcard character (%) to allow for matching recipe names.
+                For example, suppose you have an append file named
+                as follows:
+                <literallayout class='monospaced'>
+     busybox_1.21.%.bbappend
+                </literallayout>
+                That append file would match any <filename>busybox_1.21.x.bb</filename>
+                version of the recipe.
+                So, the append file would match the following recipe names:
+                <literallayout class='monospaced'>
+     busybox_1.21.1.bb
+     busybox_1.21.2.bb
+     busybox_1.21.3.bb
+                </literallayout>
+                If the <filename>busybox</filename> recipe was updated to
+                <filename>busybox_1.3.0.bb</filename>, the append name would not
+                match.
+                However, if you named the append file
+                <filename>busybox_1.%.bbappend</filename>, then you would have a match.
+            </para>
+
+            <para>
+                In the most general case, you could name the append file something as
+                simple as <filename>busybox_%.bbappend</filename> to be entirely
+                version independent.
+            </para>
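+            <para>
+                As a small sketch (the value shown is hypothetical), an append
+                file uses the same syntax as a recipe and typically adjusts or
+                extends variables set by the underlying recipe:
+                <literallayout class='monospaced'>
+     # busybox_1.21.%.bbappend (hypothetical contents)
+     DESCRIPTION .= " (customized by an append file)"
+                </literallayout>
+            </para>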
+        </section>
+    </section>
+
+    <section id='obtaining-bitbake'>
+        <title>Obtaining BitBake</title>
+
+        <para>
+            You can obtain BitBake several different ways:
+            <itemizedlist>
+                <listitem><para><emphasis>Cloning BitBake:</emphasis>
+                    Using Git to clone the BitBake source code repository
+                    is the recommended method for obtaining BitBake.
+                    Cloning the repository makes it easy to get bug fixes
+                    and have access to stable branches and the master
+                    branch.
+                    Once you have cloned BitBake, you should use
+                    the latest stable
+                    branch for development since the master branch is for
+                    BitBake development and might contain less stable changes.
+                    </para>
+                    <para>You usually need a version of BitBake
+                    that matches the metadata you are using.
+                    The metadata is generally backwards compatible but
+                    not forward compatible.</para>
+                    <para>Here is an example that clones the BitBake repository:
+                    <literallayout class='monospaced'>
+     $ git clone git://git.openembedded.org/bitbake
+                    </literallayout>
+                    This command clones the BitBake Git repository into a
+                    directory called <filename>bitbake</filename>.
+                    Alternatively, you can
+                    designate a directory after the
+                    <filename>git clone</filename> command
+                    if you want to call the new directory something
+                    other than <filename>bitbake</filename>.
+                    Here is an example that names the directory
+                    <filename>bbdev</filename>:
+                    <literallayout class='monospaced'>
+     $ git clone git://git.openembedded.org/bitbake bbdev
+                    </literallayout></para></listitem>
+                <listitem><para><emphasis>Installation using your Distribution
+                    Package Management System:</emphasis>
+                    This method is not
+                    recommended because the BitBake version that is
+                    provided by your distribution, in most cases,
+                    is several
+                    releases behind a snapshot of the BitBake repository.
+                    </para></listitem>
+                <listitem><para><emphasis>Taking a snapshot of BitBake:</emphasis>
+                    Downloading a snapshot of BitBake from the
+                    source code repository gives you access to a known
+                    branch or release of BitBake.
+                    <note>
+                         Cloning the Git repository, as described earlier,
+                         is the preferred method for getting BitBake.
+                         Cloning the repository makes it easier to update as
+                         patches are added to the stable branches.
+                    </note></para>
+                    <para>The following example downloads a snapshot of
+                    BitBake version 1.17.0:
+                    <literallayout class='monospaced'>
+     $ wget http://git.openembedded.org/bitbake/snapshot/bitbake-1.17.0.tar.gz
+     $ tar zxpvf bitbake-1.17.0.tar.gz
+                    </literallayout>
+                    After extraction of the tarball using the tar utility,
+                    you have a directory entitled
+                    <filename>bitbake-1.17.0</filename>.
+                    </para></listitem>
+                <listitem><para><emphasis>Using the BitBake that Comes With Your
+                    Build Checkout:</emphasis>
+                    A final possibility for getting a copy of BitBake is that it
+                    already comes with your checkout of a larger BitBake-based build
+                    system, such as Poky or the Yocto Project.
+                    Rather than manually checking out individual layers and
+                    gluing them together yourself, you can check
+                    out an entire build system.
+                    The checkout will already include a version of BitBake that
+                    has been thoroughly tested for compatibility with the other
+                    components.
+                    For information on how to check out a particular BitBake-based
+                    build system, consult that build system's supporting documentation.
+                    </para></listitem>
+            </itemizedlist>
+        </para>
+    </section>
+
+    <section id="bitbake-user-manual-command">
+        <title>The BitBake Command</title>
+
+        <para>
+            The <filename>bitbake</filename> command is the primary interface
+            to the BitBake tool.
+            This section presents the BitBake command syntax and provides
+            several execution examples.
+        </para>
+
+        <section id='usage-and-syntax'>
+            <title>Usage and syntax</title>
+
+            <para>
+                Following is the usage and syntax for BitBake:
+                <literallayout class='monospaced'>
+     $ bitbake -h
+     Usage: bitbake [options] [recipename/target ...]
+
+         Executes the specified task (default is 'build') for a given set of target recipes (.bb files).
+         It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which
+         will provide the layer, BBFILES and other configuration information.
+
+     Options:
+       --version             show program's version number and exit
+       -h, --help            show this help message and exit
+       -b BUILDFILE, --buildfile=BUILDFILE
+                             Execute tasks from a specific .bb recipe directly.
+                             WARNING: Does not handle any dependencies from other
+                             recipes.
+       -k, --continue        Continue as much as possible after an error. While the
+                             target that failed and anything depending on it cannot
+                             be built, as much as possible will be built before
+                             stopping.
+       -a, --tryaltconfigs   Continue with builds by trying to use alternative
+                             providers where possible.
+       -f, --force           Force the specified targets/task to run (invalidating
+                             any existing stamp file).
+       -c CMD, --cmd=CMD     Specify the task to execute. The exact options
+                             available depend on the metadata. Some examples might
+                             be 'compile' or 'populate_sysroot' or 'listtasks' may
+                             give a list of the tasks available.
+       -C INVALIDATE_STAMP, --clear-stamp=INVALIDATE_STAMP
+                             Invalidate the stamp for the specified task such as
+                             'compile' and then run the default task for the
+                             specified target(s).
+       -r PREFILE, --read=PREFILE
+                             Read the specified file before bitbake.conf.
+       -R POSTFILE, --postread=POSTFILE
+                             Read the specified file after bitbake.conf.
+       -v, --verbose         Output more log message data to the terminal.
+       -D, --debug           Increase the debug level. You can specify this more
+                             than once.
+       -n, --dry-run         Don't execute, just go through the motions.
+       -S SIGNATURE_HANDLER, --dump-signatures=SIGNATURE_HANDLER
+                             Dump out the signature construction information, with
+                             no task execution. The SIGNATURE_HANDLER parameter is
+                             passed to the handler. Two common values are none and
+                             printdiff but the handler may define more/less. none
+                             means only dump the signature, printdiff means compare
+                             the dumped signature with the cached one.
+       -p, --parse-only      Quit after parsing the BB recipes.
+       -s, --show-versions   Show current and preferred versions of all recipes.
+       -e, --environment     Show the global or per-recipe environment complete
+                             with information about where variables were
+                             set/changed.
+       -g, --graphviz        Save dependency tree information for the specified
+                             targets in the dot syntax.
+       -I EXTRA_ASSUME_PROVIDED, --ignore-deps=EXTRA_ASSUME_PROVIDED
+                             Assume these dependencies don't exist and are already
+                             provided (equivalent to ASSUME_PROVIDED). Useful to
+                             make dependency graphs more appealing
+       -l DEBUG_DOMAINS, --log-domains=DEBUG_DOMAINS
+                             Show debug logging for the specified logging domains
+       -P, --profile         Profile the command and save reports.
+       -u UI, --ui=UI        The user interface to use (e.g. knotty, hob, depexp).
+       -t SERVERTYPE, --servertype=SERVERTYPE
+                             Choose which server to use, process or xmlrpc.
+       --token=XMLRPCTOKEN   Specify the connection token to be used when
+                             connecting to a remote server.
+       --revisions-changed   Set the exit code depending on whether upstream
+                             floating revisions have changed or not.
+       --server-only         Run bitbake without a UI, only starting a server
+                             (cooker) process.
+       -B BIND, --bind=BIND  The name/address for the bitbake server to bind to.
+       --no-setscene         Do not run any setscene tasks. sstate will be ignored
+                             and everything needed, built.
+       --remote-server=REMOTE_SERVER
+                             Connect to the specified server.
+       -m, --kill-server     Terminate the remote server.
+       --observe-only        Connect to a server as an observing-only client.
+       --status-only         Check the status of the remote bitbake server.
+                </literallayout>
+            </para>
+        </section>
+
+        <section id='bitbake-examples'>
+            <title>Examples</title>
+
+            <para>
+                This section presents some examples showing how to use BitBake.
+            </para>
+
+            <section id='example-executing-a-task-against-a-single-recipe'>
+                <title>Executing a Task Against a Single Recipe</title>
+
+                <para>
+                    Executing tasks for a single recipe file is relatively simple.
+                    You specify the file in question, and BitBake parses
+                    it and executes the specified task.
+                    If you do not specify a task, BitBake executes the default
+                    task, which is "build".
+                    BitBake obeys inter-task dependencies when doing
+                    so.
+                </para>
+
+                <para>
+                    The following command runs the build task, which is
+                    the default task, on the <filename>foo_1.0.bb</filename>
+                    recipe file:
+                    <literallayout class='monospaced'>
+     $ bitbake -b foo_1.0.bb
+                    </literallayout>
+                    The following command runs the clean task on the
+                    <filename>foo.bb</filename> recipe file:
+                    <literallayout class='monospaced'>
+     $ bitbake -b foo.bb -c clean
+                    </literallayout>
+                    <note>
+                        The "-b" option explicitly does not handle recipe
+                        dependencies.
+                        Other than for debugging purposes, it is instead
+                        recommended that you use the syntax presented in the
+                        next section.
+                    </note>
+                </para>
+            </section>
+
+            <section id='executing-tasks-against-a-set-of-recipe-files'>
+                <title>Executing Tasks Against a Set of Recipe Files</title>
+
+                <para>
+                    There are a number of additional complexities introduced
+                    when one wants to manage multiple <filename>.bb</filename>
+                    files.
+                    Clearly there needs to be a way to tell BitBake what
+                    files are available and, of those, which you
+                    want to execute.
+                    There also needs to be a way for each recipe
+                    to express its dependencies, both for build-time and
+                    runtime.
+                    There must be a way for you to express recipe preferences
+                    when multiple recipes provide the same functionality, or when
+                    there are multiple versions of a recipe.
+                </para>
+
+                <para>
+                    The <filename>bitbake</filename> command, when not using
+                    "--buildfile" or "-b" only accepts a "PROVIDES".
+                    You cannot provide anything else.
+                    By default, a recipe file generally "PROVIDES" its
+                    "packagename" as shown in the following example:
+                    <literallayout class='monospaced'>
+     $ bitbake foo
+                    </literallayout>
+                    This next example "PROVIDES" the package name and also uses
+                    the "-c" option to tell BitBake to just execute the
+                    <filename>do_clean</filename> task:
+                    <literallayout class='monospaced'>
+     $ bitbake -c clean foo
+                    </literallayout>
+                </para>
+            </section>
+
+            <section id='generating-dependency-graphs'>
+                <title>Generating Dependency Graphs</title>
+
+                <para>
+                    BitBake is able to generate dependency graphs using
+                    the <filename>dot</filename> syntax.
+                    You can convert these graphs into images using the
+                    <filename>dot</filename> tool from
+                    <ulink url='http://www.graphviz.org'>Graphviz</ulink>.
+                </para>
+
+                <para>
+                    When you generate a dependency graph, BitBake writes four files
+                    to the current working directory:
+                    <itemizedlist>
+                        <listitem><para><emphasis><filename>package-depends.dot</filename>:</emphasis>
+                            Shows BitBake's knowledge of dependencies between
+                            runtime targets.
+                            </para></listitem>
+                        <listitem><para><emphasis><filename>pn-depends.dot</filename>:</emphasis>
+                            Shows dependencies between build-time targets
+                            (i.e. recipes).
+                            </para></listitem>
+                        <listitem><para><emphasis><filename>task-depends.dot</filename>:</emphasis>
+                            Shows dependencies between tasks.
+                            </para></listitem>
+                        <listitem><para><emphasis><filename>pn-buildlist</filename>:</emphasis>
+                            Shows a simple list of targets that are to be built.
+                            </para></listitem>
+                    </itemizedlist>
+                </para>
+
+                <para>
+                    To omit common dependencies from the graph, use the "-I"
+                    option; BitBake then leaves them out.
+                    Leaving this information out can produce more readable graphs.
+                    This way, you can remove the <filename>DEPENDS</filename>
+                    of inherited classes such as
+                    <filename>base.bbclass</filename> from the graph.
+                </para>
+
+                <para>
+                    Here are two examples that create dependency graphs.
+                    The second example omits depends common in OpenEmbedded from
+                    the graph:
+                    <literallayout class='monospaced'>
+     $ bitbake -g foo
+
+     $ bitbake -g -I virtual/kernel -I eglibc foo
+                    </literallayout>
+                </para>
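+
+                <para>
+                    Once generated, the <filename>.dot</filename> files can be
+                    rendered into images with the <filename>dot</filename> tool.
+                    As a brief illustration, assuming Graphviz is installed,
+                    the following command would convert the task dependency
+                    graph into a PNG image:
+                    <literallayout class='monospaced'>
+     $ dot -Tpng task-depends.dot -o task-depends.png
+                    </literallayout>
+                </para>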
+            </section>
+        </section>
+    </section>
+</chapter>

+ 1852 - 0
bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.xml

@@ -0,0 +1,1852 @@
+<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
+"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
+
+<chapter id="bitbake-user-manual-metadata">
+    <title>Syntax and Operators</title>
+
+    <para>
+        BitBake files have their own syntax.
+        The syntax has similarities to several
+        other languages but also has some unique features.
+        This section describes the available syntax and operators
+        as well as provides examples.
+    </para>
+
+    <section id='basic-syntax'>
+        <title>Basic Syntax</title>
+
+        <para>
+            This section provides some basic syntax examples.
+        </para>
+
+        <section id='basic-variable-setting'>
+            <title>Basic Variable Setting</title>
+
+            <para>
+                The following example sets <filename>VARIABLE</filename> to
+                "value".
+                This assignment occurs immediately as the statement is parsed.
+                It is a "hard" assignment.
+                <literallayout class='monospaced'>
+     VARIABLE = "value"
+                </literallayout>
+                As expected, if you include leading or trailing spaces as part of
+                an assignment, the spaces are retained:
+                <literallayout class='monospaced'>
+     VARIABLE = " value"
+     VARIABLE = "value "
+                </literallayout>
+                Setting <filename>VARIABLE</filename> to "" sets it to an empty string,
+                while setting the variable to " " sets it to a blank space
+                (i.e. these are not the same values).
+                <literallayout class='monospaced'>
+     VARIABLE = ""
+     VARIABLE = " "
+                </literallayout>
+            </para>
+        </section>
+
+        <section id='variable-expansion'>
+            <title>Variable Expansion</title>
+
+            <para>
+                BitBake supports variables referencing one another's
+                contents using a syntax that is similar to shell scripting.
+                Following is an example that results in <filename>A</filename>
+                containing "aval" and <filename>B</filename> evaluating to
+                "preavalpost" based on that current value of
+                <filename>A</filename>.
+                <literallayout class='monospaced'>
+     A = "aval"
+     B = "pre${A}post"
+                </literallayout>
+                You should realize that whenever <filename>B</filename> is
+                referenced, its evaluation will depend on the state of
+                <filename>A</filename> at that time.
+                Thus, later evaluations of <filename>B</filename> in the
+                previous example could result in different values
+                depending on the value of <filename>A</filename>.
+            </para>
+        </section>
+
+        <section id='setting-a-default-value'>
+            <title>Setting a default value (?=)</title>
+
+            <para>
+                You can use the "?=" operator to achieve a "softer" assignment
+                for a variable.
+                This type of assignment allows you to define a variable if it
+                is undefined when the statement is parsed, but to leave the
+                value alone if the variable has a value.
+                Here is an example:
+                <literallayout class='monospaced'>
+     A ?= "aval"
+                </literallayout>
+                If <filename>A</filename> is set at the time this statement is parsed,
+                the variable retains its value.
+                However, if <filename>A</filename> is not set,
+                the variable is set to "aval".
+                <note>
+                    This assignment is immediate.
+                    Consequently, if multiple "?=" assignments
+                    to a single variable exist, the first of those ends up getting
+                    used.
+                </note>
+            </para>
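+
+            <para>
+                As a brief illustration of the note above, consider the
+                following hypothetical assignments:
+                <literallayout class='monospaced'>
+     A ?= "first"
+     A ?= "second"
+                </literallayout>
+                Assuming <filename>A</filename> was not previously set, the
+                first statement sets it to "first" and the second statement
+                has no effect.
+            </para>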
+        </section>
+
+        <section id='setting-a-weak-default-value'>
+            <title>Setting a weak default value (??=)</title>
+
+            <para>
+                It is possible to use a "weaker" assignment than in the
+                previous section by using the "??=" operator.
+                This assignment behaves identically to "?=" except that the
+                assignment is made at the end of the parsing process rather
+                than immediately.
+                Consequently, when multiple "??=" assignments exist, the last
+                one is used.
+                Also, any "=" or "?=" assignment will override the value set with
+                "??=".
+                Here is an example:
+                <literallayout class='monospaced'>
+     A ??= "somevalue"
+     A ??= "someothervalue"
+                </literallayout>
+                If <filename>A</filename> is set before the above statements are parsed,
+                the variable retains its value.
+                If <filename>A</filename> is not set,
+                the variable is set to "someothervalue".
+            </para>
+
+            <para>
+                Again, this assignment is a "lazy" or "weak" assignment
+                because it does not occur until the end
+                of the parsing process.
+            </para>
+        </section>
+
+        <section id='immediate-variable-expansion'>
+            <title>Immediate variable expansion (:=)</title>
+
+            <para>
+                The ":=" operator results in a variable's
+                contents being expanded immediately,
+                rather than when the variable is actually used:
+                <literallayout class='monospaced'>
+     T = "123"
+     A := "${B} ${A} test ${T}"
+     T = "456"
+     B = "${T} bval"
+     C = "cval"
+     C := "${C}append"
+                </literallayout>
+                In this example, <filename>A</filename> contains
+                "test 123" because <filename>${B}</filename> and
+                <filename>${A}</filename> are undefined at the time of
+                parsing, which leaves only "test 123".
+                And, the variable <filename>C</filename>
+                contains "cvalappend" since <filename>${C}</filename> immediately
+                expands to "cval".
+            </para>
+        </section>
+
+        <section id='appending-and-prepending'>
+            <title>Appending (+=) and prepending (=+) With Spaces</title>
+
+            <para>
+                Appending and prepending values is common and can be accomplished
+                using the "+=" and "=+" operators.
+                These operators insert a space between the current
+                value and prepended or appended value.
+            </para>
+
+            <para>
+                These operators take immediate effect during parsing.
+                Here are some examples:
+                <literallayout class='monospaced'>
+     B = "bval"
+     B += "additionaldata"
+     C = "cval"
+     C =+ "test"
+                </literallayout>
+                The variable <filename>B</filename> contains
+                "bval additionaldata" and <filename>C</filename>
+                contains "test cval".
+            </para>
+        </section>
+
+        <section id='appending-and-prepending-without-spaces'>
+            <title>Appending (.=) and Prepending (=.) Without Spaces</title>
+
+            <para>
+                If you want to append or prepend values without an
+                inserted space, use the ".=" and "=." operators.
+            </para>
+
+            <para>
+                These operators take immediate effect during parsing.
+                Here are some examples:
+                <literallayout class='monospaced'>
+     B = "bval"
+     B .= "additionaldata"
+     C = "cval"
+     C =. "test"
+                </literallayout>
+                The variable <filename>B</filename> contains
+                "bvaladditionaldata" and
+                <filename>C</filename> contains "testcval".
+            </para>
+        </section>
+
+        <section id='appending-and-prepending-override-style-syntax'>
+            <title>Appending and Prepending (Override Style Syntax)</title>
+
+            <para>
+                You can also append and prepend a variable's value
+                using an override style syntax.
+                When you use this syntax, no spaces are inserted.
+            </para>
+
+            <para>
+                These operators differ from the ":=", ".=", "=.", "+=", and "=+"
+                operators in that their effects are deferred
+                until after parsing completes rather than being immediately
+                applied.
+                Here are some examples:
+                <literallayout class='monospaced'>
+     B = "bval"
+     B_append = " additional data"
+     C = "cval"
+     C_prepend = "additional data "
+     D = "dval"
+     D_append = "additional data"
+                </literallayout>
+                The variable <filename>B</filename> becomes
+                "bval additional data" and <filename>C</filename> becomes
+                "additional data cval".
+                The variable <filename>D</filename> becomes
+                "dvaladditional data".
+                <note>
+                    You must control all spacing when you use the
+                    override syntax.
+                </note>
+            </para>
+        </section>
+
+        <section id='removing-override-style-syntax'>
+            <title>Removal (Override Style Syntax)</title>
+
+            <para>
+                You can remove values from lists using the removal
+                override style syntax.
+                Specifying a value for removal causes all occurrences of that
+                value to be removed from the variable.
+            </para>
+
+            <para>
+                When you use this syntax, BitBake expects one or more strings.
+                Surrounding spaces are removed as well.
+                Here is an example:
+                <literallayout class='monospaced'>
+     FOO = "123 456 789 123456 123 456 123 456"
+     FOO_remove = "123"
+     FOO_remove = "456"
+     FOO2 = "abc def ghi abcdef abc def abc def"
+     FOO2_remove = "abc def"
+                </literallayout>
+                The variable <filename>FOO</filename> becomes
+                "789 123456" and <filename>FOO2</filename> becomes
+                "ghi abcdef".
+            </para>
+        </section>
+
+        <section id='variable-flag-syntax'>
+            <title>Variable Flag Syntax</title>
+
+            <para>
+                Variable flags are BitBake's implementation of variable properties
+                or attributes.
+                It is a way of tagging extra information onto a variable.
+                You can find more out about variable flags in general in the
+                "<link linkend='variable-flags'>Variable Flags</link>"
+                section.
+            </para>
+
+            <para>
+                You can define, append, and prepend values to variable flags.
+                All the standard syntax operations previously mentioned work
+                for variable flags except for override style syntax
+                (i.e. <filename>_prepend</filename>, <filename>_append</filename>,
+                and <filename>_remove</filename>).
+            </para>
+
+            <para>
+                Here are some examples showing how to set variable flags:
+                <literallayout class='monospaced'>
+     FOO[a] = "abc"
+     FOO[b] = "123"
+     FOO[a] += "456"
+                </literallayout>
+                The variable <filename>FOO</filename> has two flags:
+                <filename>a</filename> and <filename>b</filename>.
+                The flags are immediately set to "abc" and "123", respectively.
+                The <filename>a</filename> flag becomes "abc 456".
+            </para>
+
+            <para>
+                There is no need to pre-define variable flags.
+                You can simply start using them.
+                One extremely common application
+                is to attach some brief documentation to a BitBake variable as
+                follows:
+                <literallayout class='monospaced'>
+     CACHE[doc] = "The directory holding the cache of the metadata."
+                </literallayout>
+            </para>
+        </section>
+
+        <section id='inline-python-variable-expansion'>
+            <title>Inline Python Variable Expansion</title>
+
+            <para>
+                You can use inline Python variable expansion to
+                set variables.
+                Here is an example:
+                <literallayout class='monospaced'>
+     DATE = "${@time.strftime('%Y%m%d',time.gmtime())}"
+                </literallayout>
+                This example results in the <filename>DATE</filename>
+                variable being set to the current date.
+            </para>
+
+            <para>
+                Probably the most common use of this feature is to extract
+                the value of variables from BitBake's internal data dictionary,
+                <filename>d</filename>.
+                The following lines select the values of a package name
+                and its version number, respectively:
+                <literallayout class='monospaced'>
+     PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}"
+     PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[1] or '1.0'}"
+                </literallayout>
+            </para>
+        </section>
+
+        <section id='providing-pathnames'>
+            <title>Providing Pathnames</title>
+
+            <para>
+                When specifying pathnames for use with BitBake,
+                do not use the tilde ("~") character as a shortcut
+                for your home directory.
+                Doing so might cause BitBake to not recognize the
+                path since BitBake does not expand this character in
+                the same way a shell would.
+            </para>
+
+            <para>
+                Instead, provide a fuller path as the following
+                example illustrates:
+                <literallayout class='monospaced'>
+     BBLAYERS ?= " \
+       /home/scott-lenovo/LayerA \
+       "
+                </literallayout>
+            </para>
+        </section>
+    </section>
+
+    <section id='conditional-syntax-overrides'>
+        <title>Conditional Syntax (Overrides)</title>
+
+        <para>
+            BitBake uses
+            <link linkend='var-OVERRIDES'><filename>OVERRIDES</filename></link>
+            to control what variables are overridden after BitBake
+            parses recipes and configuration files.
+            This section describes how you can use
+            <filename>OVERRIDES</filename> as conditional metadata,
+            talks about key expansion in relationship to
+            <filename>OVERRIDES</filename>, and provides some examples
+            to help with understanding.
+        </para>
+
+        <section id='conditional-metadata'>
+            <title>Conditional Metadata</title>
+
+            <para>
+                You can use <filename>OVERRIDES</filename> to conditionally select
+                a specific version of a variable and to conditionally
+                append or prepend the value of a variable.
+                <itemizedlist>
+                    <listitem><para><emphasis>Selecting a Variable:</emphasis>
+                        The <filename>OVERRIDES</filename> variable is
+                        a colon-character-separated list that contains items
+                        for which you want to satisfy conditions.
+                        Thus, if you have a variable that is conditional on "arm", and "arm"
+                        is in <filename>OVERRIDES</filename>, then the "arm"-specific
+                        version of the variable is used rather than the non-conditional
+                        version.
+                        Here is an example:
+                        <literallayout class='monospaced'>
+     OVERRIDES = "architecture:os:machine"
+     TEST = "default"
+     TEST_os = "osspecific"
+     TEST_nooverride = "othercondvalue"
+                        </literallayout>
+                        In this example, the <filename>OVERRIDES</filename>
+                        variable lists three overrides:
+                        "architecture", "os", and "machine".
+                        The variable <filename>TEST</filename> by itself has a default
+                        value of "default".
+                        You select the os-specific version of the <filename>TEST</filename>
+                        variable by appending the "os" override to the variable
+                        (i.e. <filename>TEST_os</filename>).
+                        </para>
+
+                        <para>
+                            To better understand this, consider a practical example
+                            that assumes an OpenEmbedded metadata-based Linux
+                            kernel recipe file.
+                            The following lines from the recipe file first set
+                            the kernel branch variable <filename>KBRANCH</filename>
+                            to a default value, then conditionally override that
+                            value based on the architecture of the build:
+                        <literallayout class='monospaced'>
+     KBRANCH = "standard/base"
+     KBRANCH_qemuarm  = "standard/arm-versatile-926ejs"
+     KBRANCH_qemumips = "standard/mti-malta32"
+     KBRANCH_qemuppc  = "standard/qemuppc"
+     KBRANCH_qemux86  = "standard/common-pc/base"
+     KBRANCH_qemux86-64  = "standard/common-pc-64/base"
+     KBRANCH_qemumips64 = "standard/mti-malta64"
+                        </literallayout>
+                        </para></listitem>
+                    <listitem><para><emphasis>Appending and Prepending:</emphasis>
+                        BitBake also supports append and prepend operations to
+                        variable values based on whether a specific item is
+                        listed in <filename>OVERRIDES</filename>.
+                        Here is an example:
+                        <literallayout class='monospaced'>
+     DEPENDS = "glibc ncurses"
+     OVERRIDES = "machine:local"
+     DEPENDS_append_machine = " libmad"
+                        </literallayout>
+                        In this example, <filename>DEPENDS</filename> becomes
+                        "glibc ncurses libmad".
+                        </para>
+
+                        <para>
+                            Again, using an OpenEmbedded metadata-based
+                            kernel recipe file as an example, the
+                            following lines will conditionally append to the
+                            <filename>KERNEL_FEATURES</filename> variable based
+                            on the architecture:
+                        <literallayout class='monospaced'>
+     KERNEL_FEATURES_append = " ${KERNEL_EXTRA_FEATURES}"
+     KERNEL_FEATURES_append_qemux86=" cfg/sound.scc cfg/paravirt_kvm.scc"
+     KERNEL_FEATURES_append_qemux86-64=" cfg/sound.scc cfg/paravirt_kvm.scc"
+                        </literallayout>
+                        </para></listitem>
+                </itemizedlist>
+            </para>
+        </section>
+
+        <section id='key-expansion'>
+            <title>Key Expansion</title>
+
+            <para>
+                Key expansion happens when the BitBake datastore is finalized
+                just before BitBake expands overrides.
+                To better understand this, consider the following example:
+                <literallayout class='monospaced'>
+     A${B} = "X"
+     B = "2"
+     A2 = "Y"
+                </literallayout>
+                In this case, after all the parsing is complete, and
+                before any overrides are handled, BitBake expands
+                <filename>${B}</filename> into "2".
+                This expansion causes <filename>A2</filename>, which was
+                set to "Y" before the expansion, to become "X".
+            </para>
+        </section>
+
+        <section id='variable-interaction-worked-examples'>
+            <title>Examples</title>
+
+            <para>
+                Despite the previous explanations that show the different forms of
+                variable definitions, it can be hard to work
+                out exactly what happens when variable operators, conditional
+                overrides, and unconditional overrides are combined.
+                This section presents some common scenarios along
+                with explanations for variable interactions that
+                typically confuse users.
+            </para>
+
+            <para>
+                There is often confusion concerning the order in which
+                overrides and various "append" operators take effect.
+                Recall that an append or prepend operation using "_append"
+                and "_prepend" does not result in an immediate assignment
+                as would "+=", ".=", "=+", or "=.".
+                Consider the following example:
+                <literallayout class='monospaced'>
+     OVERRIDES = "foo"
+     A = "Z"
+     A_foo_append = "X"
+                </literallayout>
+                For this case, <filename>A</filename> is
+                unconditionally set to "Z" and "X" is
+                unconditionally and immediately appended to the variable
+                <filename>A_foo</filename>.
+                Because overrides have not been applied yet,
+                <filename>A_foo</filename> is set to "X" due to the append
+                and <filename>A</filename> simply equals "Z".
+            </para>
+
+            <para>
+                Applying overrides, however, changes things.
+                Since "foo" is listed in <filename>OVERRIDES</filename>,
+                the conditional variable <filename>A</filename> is replaced
+                with the "foo" version, which is equal to "X".
+                So effectively, <filename>A_foo</filename> replaces <filename>A</filename>.
+            </para>
+
+            <para>
+                This next example changes the order of the override and
+                the append:
+                <literallayout class='monospaced'>
+     OVERRIDES = "foo"
+     A = "Z"
+     A_append_foo = "X"
+                </literallayout>
+                For this case, before overrides are handled,
+                <filename>A</filename> is set to "Z" and <filename>A_append_foo</filename>
+                is set to "X".
+                Once the override for "foo" is applied, however,
+                <filename>A</filename> gets appended with "X".
+                Consequently, <filename>A</filename> becomes "ZX".
+                Notice that spaces are not appended.
+            </para>
+
+            <para>
+                This next example has the order of the appends and overrides reversed
+                back as in the first example:
+                <literallayout class='monospaced'>
+     OVERRIDES = "foo"
+     A = "Y"
+     A_foo_append = "Z"
+     A_foo_append += "X"
+                </literallayout>
+                For this case, before any overrides are resolved,
+                <filename>A</filename> is set to "Y" using an immediate assignment.
+                After this immediate assignment, <filename>A_foo</filename> is set
+                to "Z", and then further appended with
+                "X" leaving the variable set to "Z X".
+                Finally, applying the override for "foo" results in the conditional
+                variable <filename>A</filename> becoming "Z X" (i.e.
+                <filename>A</filename> is replaced with <filename>A_foo</filename>).
+            </para>
+
+            <para>
+                This final example mixes in some varying operators:
+                <literallayout class='monospaced'>
+     A = "1"
+     A_append = "2"
+     A_append = "3"
+     A += "4"
+     A .= "5"
+                </literallayout>
+                For this case, the types of append operators affect the
+                order of assignments as BitBake passes through the code
+                multiple times.
+                Initially, <filename>A</filename> is set to "1 45" because
+                of the three statements that use immediate operators.
+                After these assignments are made, BitBake applies the
+                <filename>_append</filename> operations.
+                Those operations result in <filename>A</filename> becoming "1 4523".
+            </para>
+        </section>
+    </section>
+
+    <section id='sharing-functionality'>
+        <title>Sharing Functionality</title>
+
+        <para>
+            BitBake allows for metadata sharing through include files
+            (<filename>.inc</filename>) and class files
+            (<filename>.bbclass</filename>).
+            For example, suppose you have a piece of common functionality
+            such as a task definition that you want to share between
+            more than one recipe.
+            In this case, creating a <filename>.bbclass</filename>
+            file that contains the common functionality and then using
+            the <filename>inherit</filename> directive in your recipes to
+            inherit the class would be a common way to share the task.
+        </para>
+
+        <para>
+            This section presents the mechanisms BitBake provides to
+            allow you to share functionality between recipes.
+            Specifically, the mechanisms include <filename>include</filename>,
+            <filename>inherit</filename>, <filename>INHERIT</filename>, and
+            <filename>require</filename> directives.
+        </para>
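+
+        <para>
+            As a brief, hypothetical sketch of this approach, a class file
+            named <filename>mytasks.bbclass</filename> could define a shared
+            task, and any recipe that needs the task could pull it in with
+            the <filename>inherit</filename> directive:
+            <literallayout class='monospaced'>
+     # classes/mytasks.bbclass (hypothetical)
+     do_report () {
+         echo "Running the shared report task"
+     }
+     addtask report after do_fetch before do_build
+
+     # In each recipe (.bb file) that needs the shared task:
+     inherit mytasks
+            </literallayout>
+        </para>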
+
+        <section id='locating-include-and-class-files'>
+            <title>Locating Include and Class Files</title>
+
+            <para>
+                BitBake uses the
+                <link linkend='var-BBPATH'><filename>BBPATH</filename></link>
+                variable to locate needed include and class files.
+                The <filename>BBPATH</filename> variable is analogous to
+                the environment variable <filename>PATH</filename>.
+            </para>
+
+            <para>
+                In order for include and class files to be found by BitBake,
+                they need to be located in a "classes" subdirectory that can
+                be found in <filename>BBPATH</filename>.
+            </para>
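+
+            <para>
+                For illustration, an OpenEmbedded-style layer configuration
+                file (<filename>conf/layer.conf</filename>) typically appends
+                the layer's own directory to <filename>BBPATH</filename> so
+                that the layer's class and include files can be found:
+                <literallayout class='monospaced'>
+     # conf/layer.conf (typical pattern, shown for illustration)
+     BBPATH .= ":${LAYERDIR}"
+                </literallayout>
+            </para>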
+        </section>
+
+        <section id='inherit-directive'>
+            <title><filename>inherit</filename> Directive</title>
+
+            <para>
+                When writing a recipe or class file, you can use the
+                <filename>inherit</filename> directive to inherit the
+                functionality of a class (<filename>.bbclass</filename>).
+                BitBake only supports this directive when used within recipe
+                and class files (i.e. <filename>.bb</filename> and
+                <filename>.bbclass</filename>).
+            </para>
+
+            <para>
+                The <filename>inherit</filename> directive is a rudimentary
+                means of specifying what classes of functionality your
+                recipes require.
+                For example, you can easily abstract out the tasks involved in
+                building a package that uses Autoconf and Automake and put
+                those tasks into a class file that can be used by your recipe.
+            </para>
+
+            <para>
+                As an example, your recipes could use the following directive
+                to inherit an <filename>autotools.bbclass</filename> file.
+                The class file would contain common functionality for using
+                Autotools that could be shared across recipes:
+                <literallayout class='monospaced'>
+     inherit autotools
+                </literallayout>
+                In this case, BitBake would search for the file
+                <filename>classes/autotools.bbclass</filename>
+                within the directories listed in <filename>BBPATH</filename>.
+                <note>
+                    You can override any values and functions of the
+                    inherited class within your recipe by doing so
+                    after the "inherit" statement.
+                </note>
+            </para>
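+
+            <para>
+                For instance, a recipe could inherit a class and then
+                override one of the values that the class sets.
+                The variable name below is purely illustrative:
+                <literallayout class='monospaced'>
+     inherit autotools
+
+     # Replaces a value that the class (hypothetically) set earlier
+     SOME_CLASS_VARIABLE = "recipe-specific value"
+                </literallayout>
+            </para>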
+        </section>
+
+        <section id='include-directive'>
+            <title><filename>include</filename> Directive</title>
+
+            <para>
+                BitBake understands the <filename>include</filename>
+                directive.
+                This directive causes BitBake to parse whatever file you specify,
+                and to insert that file at that location.
+                The directive is much like its equivalent in Make except
+                that if the path specified on the include line is a relative
+                path, BitBake locates the first file it can find
+                within <filename>BBPATH</filename>.
+            </para>
+
+            <para>
+                As an example, suppose you needed a recipe to include some
+                self-test definitions:
+                <literallayout class='monospaced'>
+     include test_defs.inc
+                </literallayout>
+                <note>
+                    The <filename>include</filename> directive does not
+                    produce an error when the file cannot be found.
+                    Consequently, it is recommended that if the file you
+                    are including is expected to exist, you should use
+                    <link linkend='require-inclusion'><filename>require</filename></link>
+                    instead of <filename>include</filename>.
+                    Doing so makes sure that an error is produced if the
+                    file cannot be found.
+                </note>
+            </para>
+        </section>
+
+        <section id='require-inclusion'>
+            <title><filename>require</filename> Directive</title>
+
+            <para>
+                BitBake understands the <filename>require</filename>
+                directive.
+                This directive behaves just like the
+                <filename>include</filename> directive with the exception that
+                BitBake raises a parsing error if the file to be included cannot
+                be found.
+                Thus, any file you require is inserted into the file that is
+                being parsed at the location of the directive.
+            </para>
+
+            <para>
+                Similar to how BitBake handles
+                <link linkend='include-directive'><filename>include</filename></link>,
+                if the path specified
+                on the require line is a relative path, BitBake locates
+                the first file it can find within <filename>BBPATH</filename>.
+            </para>
+
+            <para>
+                As an example, suppose you have two versions of a recipe
+                (e.g. <filename>foo_1.2.2.bb</filename> and
+                <filename>foo_2.0.0.bb</filename>) where
+                each version contains some identical functionality that could be
+                shared.
+                You could create an include file named <filename>foo.inc</filename>
+                that contains the common definitions needed to build "foo".
+                You need to be sure <filename>foo.inc</filename> is located in the
+                same directory as your two recipe files as well.
+                Once these conditions are set up, you can share the functionality
+                using a <filename>require</filename> directive from within each
+                recipe:
+                <literallayout class='monospaced'>
+     require foo.inc
+                </literallayout>
+            </para>
+        </section>
+
+        <section id='inherit-configuration-directive'>
+            <title><filename>INHERIT</filename> Configuration Directive</title>
+
+            <para>
+                When creating a configuration file (<filename>.conf</filename>),
+                you can use the <filename>INHERIT</filename> directive to
+                inherit a class.
+                BitBake only supports this directive when used within
+                a configuration file.
+            </para>
+
+            <para>
+                As an example, suppose you needed to inherit a class
+                file called <filename>abc.bbclass</filename> from a
+                configuration file as follows:
+                <literallayout class='monospaced'>
+     INHERIT += "abc"
+                </literallayout>
+                This configuration directive causes the named
+                class to be inherited at the point of the directive
+                during parsing.
+                As with the <filename>inherit</filename> directive, the
+                <filename>.bbclass</filename> file must be located in a
+                "classes" subdirectory in one of the directories specified
+                in <filename>BBPATH</filename>.
+                <note>
+                    Because <filename>.conf</filename> files are parsed
+                    first during BitBake's execution, using
+                    <filename>INHERIT</filename> to inherit a class effectively
+                    inherits the class globally (i.e. for all recipes).
+                </note>
+            </para>
+        </section>
+    </section>
+
+    <section id='functions'>
+        <title>Functions</title>
+
+        <para>
+            As with most languages, functions are the building blocks that
+            are used to build up operations into tasks.
+            BitBake supports these types of functions:
+            <itemizedlist>
+                <listitem><para><emphasis>Shell Functions:</emphasis>
+                    Functions written in shell script and executed either
+                    directly as functions, tasks, or both.
+                    They can also be called by other shell functions.
+                    </para></listitem>
+                <listitem><para><emphasis>BitBake Style Python Functions:</emphasis>
+                    Functions written in Python and executed by BitBake or other
+                    Python functions using <filename>bb.build.exec_func()</filename>.
+                    </para></listitem>
+                <listitem><para><emphasis>Python Functions:</emphasis>
+                    Functions written in Python and executed by Python.
+                    </para></listitem>
+                <listitem><para><emphasis>Anonymous Python Functions:</emphasis>
+                    Python functions executed automatically during
+                    parsing.
+                    </para></listitem>
+            </itemizedlist>
+            Regardless of the type of function, you can only
+            define them in class (<filename>.bbclass</filename>)
+            and recipe (<filename>.bb</filename> or <filename>.inc</filename>)
+            files.
+        </para>
+
+        <section id='shell-functions'>
+            <title>Shell Functions</title>
+
+            <para>
+                These functions are written in shell script and are executed
+                either directly as functions, as tasks, or both.
+                They can also be called by other shell functions.
+                Here is an example shell function definition:
+                <literallayout class='monospaced'>
+     some_function () {
+         echo "Hello World"
+     }
+                </literallayout>
+                When you create these types of functions in your recipe
+                or class files, you need to follow the shell programming
+                rules.
+                The scripts are executed by <filename>/bin/sh</filename>,
+                which may not be a bash shell but might be something
+                such as <filename>dash</filename>.
+                You should not use Bash-specific syntax (bashisms).
+            </para>
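+
+            <para>
+                As a minimal sketch of one shell function calling another,
+                the following hypothetical function reuses the one defined
+                above:
+                <literallayout class='monospaced'>
+     another_function () {
+         some_function
+         echo "Goodbye"
+     }
+                </literallayout>
+            </para>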
+        </section>
+
+        <section id='bitbake-style-python-functions'>
+            <title>BitBake Style Python Functions</title>
+
+            <para>
+                These functions are written in Python and executed by
+                BitBake or other Python functions using
+                <filename>bb.build.exec_func()</filename>.
+            </para>
+
+            <para>
+                An example BitBake function is:
+                <literallayout class='monospaced'>
+     python some_python_function () {
+         d.setVar("TEXT", "Hello World")
+         print d.getVar("TEXT", True)
+     }
+                </literallayout>
+                Because the Python "bb" and "os" modules are already
+                imported, you do not need to import these modules.
+                Also in these types of functions, the datastore ("d")
+                is a global variable and is always automatically
+                available.
+           </para>
+
+           <note>
+                Variable expressions (e.g. <filename>${X}</filename>) are no
+                longer expanded within Python functions.
+                This behavior is intentional in order to allow you to freely
+                set variable values to expandable expressions without having
+                them expanded prematurely.
+                If you do wish to expand a variable within a Python function,
+                use <filename>d.getVar("X", True)</filename>.
+                Or, for more complicated expressions, use
+                <filename>d.expand()</filename>.
+           </note>
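+
+            <para>
+                The following minimal sketch shows both forms of explicit
+                expansion inside a BitBake style Python function.
+                The variable <filename>X</filename> is hypothetical:
+                <literallayout class='monospaced'>
+     python do_show_expansion () {
+         # Fetch the expanded value of a single variable
+         x = d.getVar("X", True)
+         # Explicitly expand a string that contains a variable reference
+         msg = d.expand("X is currently ${X}")
+         bb.note("%s (getVar returned: %s)" % (msg, x))
+     }
+                </literallayout>
+            </para>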
+        </section>
+
+        <section id='python-functions'>
+            <title>Python Functions</title>
+
+            <para>
+                These functions are written in Python and are executed by
+                other Python code.
+                Examples of Python functions are utility functions
+                that you intend to call from in-line Python or
+                from within other Python functions.
+                Here is an example:
+                <literallayout class='monospaced'>
+     def get_depends(d):
+         if d.getVar('SOMECONDITION', True):
+             return "dependencywithcond"
+         else:
+             return "dependency"
+     SOMECONDITION = "1"
+     DEPENDS = "${@get_depends(d)}"
+                </literallayout>
+                This would result in <filename>DEPENDS</filename>
+                containing <filename>dependencywithcond</filename>.
+            </para>
+
+            <para>
+                Here are some things to know about Python functions:
+                <itemizedlist>
+                    <listitem><para>Python functions can take parameters.
+                        </para></listitem>
+                    <listitem><para>The BitBake datastore is not
+                        automatically available.
+                        Consequently, you must pass it in as a
+                        parameter to the function.
+                        </para></listitem>
+                    <listitem><para>The "bb" and "os" Python modules are
+                        automatically available.
+                        You do not need to import them.
+                        </para></listitem>
+                </itemizedlist>
+            </para>
+        </section>
+
+        <section id='anonymous-python-functions'>
+            <title>Anonymous Python Functions</title>
+
+            <para>
+                Sometimes it is useful to run some code during
+                parsing to set variables or to perform other operations
+                programmatically.
+                To do this, you can define an anonymous Python function.
+                Here is an example that conditionally sets a
+                variable based on the value of another variable:
+                <literallayout class='monospaced'>
+     python __anonymous () {
+         if d.getVar('SOMEVAR', True) == 'value':
+             d.setVar('ANOTHERVAR', 'value2')
+     }
+                </literallayout>
+                The "__anonymous" function name is optional, so the
+                following example is functionally equivalent to the above:
+                <literallayout class='monospaced'>
+     python () {
+         if d.getVar('SOMEVAR', True) == 'value':
+             d.setVar('ANOTHERVAR', 'value2')
+     }
+                </literallayout>
+                Because, unlike other Python functions, anonymous
+                Python functions are executed during parsing, the
+                "d" variable within an anonymous Python function represents
+                the datastore for the entire recipe.
+                Consequently, you can set variable values here and
+                those values can be picked up by other functions.
+            </para>
+        </section>
+
+        <section id='flexible-inheritance-for-class-functions'>
+            <title>Flexible Inheritance for Class Functions</title>
+
+            <para>
+                Through coding techniques and the use of
+                <filename>EXPORT_FUNCTIONS</filename>, BitBake supports
+                exporting a function from a class such that the
+                class function appears as the default implementation
+                of the function, but can still be called if a recipe
+                inheriting the class needs to define its own version of
+                the function.
+            </para>
+
+            <para>
+                To understand the benefits of this feature, consider
+                the basic scenario where a class defines a task function
+                and your recipe inherits the class.
+                In this basic scenario, your recipe inherits the task
+                function as defined in the class.
+                If desired, your recipe can add to the start and end of the
+                function by using the "_prepend" or "_append" operations
+                respectively, or it can redefine the function completely.
+                However, if it redefines the function, there is
+                no means for it to call the class version of the function.
+                <filename>EXPORT_FUNCTIONS</filename> provides a mechanism
+                that enables the recipe's version of the function to call
+                the original version of the function.
+            </para>
+
+            <para>
+                To make use of this technique, you need the following
+                things in place:
+                <itemizedlist>
+                    <listitem><para>
+                        The class needs to define the function as follows:
+                        <literallayout class='monospaced'>
+     <replaceable>classname</replaceable><filename>_</filename><replaceable>functionname</replaceable>
+                        </literallayout>
+                        For example, if you have a class file
+                        <filename>bar.bbclass</filename> and a function named
+                        <filename>do_foo</filename>, the class must define the function
+                        as follows:
+                        <literallayout class='monospaced'>
+     bar_do_foo
+                        </literallayout>
+                        </para></listitem>
+                    <listitem><para>
+                        The class needs to contain the <filename>EXPORT_FUNCTIONS</filename>
+                        statement as follows:
+                        <literallayout class='monospaced'>
+     EXPORT_FUNCTIONS <replaceable>functionname</replaceable>
+                        </literallayout>
+                        For example, continuing with the same example, the
+                        statement in the <filename>bar.bbclass</filename> would be
+                        as follows:
+                        <literallayout class='monospaced'>
+     EXPORT_FUNCTIONS do_foo
+                        </literallayout>
+                        </para></listitem>
+                    <listitem><para>
+                        You need to call the function appropriately from within your
+                        recipe.
+                        Continuing with the same example, if your recipe
+                        needs to call the class version of the function,
+                        it should call <filename>bar_do_foo</filename>.
+                        Assuming <filename>do_foo</filename> was a shell function
+                        and <filename>EXPORT_FUNCTIONS</filename> was used as above,
+                        the recipe's function could conditionally call the
+                        class version of the function as follows:
+                        <literallayout class='monospaced'>
+     do_foo() {
+             if [ somecondition ] ; then
+                     bar_do_foo
+             else
+                     # Do something else
+             fi
+     }
+                        </literallayout>
+                        To call your modified version of the function as defined
+                        in your recipe, call it as <filename>do_foo</filename>.
+                        </para></listitem>
+                </itemizedlist>
+                With these conditions met, your single recipe
+                can freely choose between the original function
+                as defined in the class file and the modified function in your recipe.
+                If you do not set up these conditions, you are limited to using one function
+                or the other.
+            </para>
+        </section>
+    </section>
+
+    <section id='tasks'>
+        <title>Tasks</title>
+
+        <para>
+            Tasks are BitBake execution units that originate as
+            functions and make up the steps that BitBake needs to run
+            for a given recipe.
+            Tasks are only supported in recipe (<filename>.bb</filename>
+            or <filename>.inc</filename>) and class
+            (<filename>.bbclass</filename>) files.
+            By convention, task names begin with the string "do_".
+        </para>
+
+        <para>
+            Here is an example of a task that prints out the date:
+            <literallayout class='monospaced'>
+     python do_printdate () {
+         import time
+         print time.strftime('%Y%m%d', time.gmtime())
+     }
+     addtask printdate after do_fetch before do_build
+            </literallayout>
+        </para>
+
+        <section id='promoting-a-function-to-a-task'>
+            <title>Promoting a Function to a Task</title>
+
+            <para>
+                Any function can be promoted to a task by applying the
+                <filename>addtask</filename> command.
+                The <filename>addtask</filename> command also describes
+                inter-task dependencies.
+                Here is the example from the previous section again, where
+                the <filename>addtask</filename> command promotes the
+                function to a task and defines some dependencies:
+                <literallayout class='monospaced'>
+     python do_printdate () {
+         import time
+         print time.strftime('%Y%m%d', time.gmtime())
+     }
+     addtask printdate after do_fetch before do_build
+                </literallayout>
+                In the example, the function is defined and then promoted
+                as a task.
+                The <filename>do_printdate</filename> task becomes a dependency of
+                the <filename>do_build</filename> task, which is the default
+                task.
+                And, the <filename>do_printdate</filename> task is dependent upon
+                the <filename>do_fetch</filename> task.
+                Execution of the <filename>do_build</filename> task results
+                in the <filename>do_printdate</filename> task running first.
+            </para>
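+
+            <para>
+                Once promoted, the task can be run on its own.
+                For example, assuming the recipe is named
+                <filename>myrecipe</filename> (an illustrative name), the
+                following command runs just the new task:
+                <literallayout class='monospaced'>
+     bitbake -c printdate myrecipe
+                </literallayout>
+            </para>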
+        </section>
+
+        <section id='deleting-a-task'>
+            <title>Deleting a Task</title>
+
+            <para>
+                As well as being able to add tasks, you can delete them.
+                Simply use the <filename>deltask</filename> command to
+                delete a task.
+                For example, to delete the example task used in the previous
+                sections, you would use:
+                <literallayout class='monospaced'>
+     deltask printdate
+                </literallayout>
+                If you delete a task using the <filename>deltask</filename>
+                command and the task has dependencies, the dependencies are
+                not reconnected.
+                For example, suppose you have three tasks named
+                <filename>do_a</filename>, <filename>do_b</filename>, and
+                <filename>do_c</filename>.
+                Furthermore, <filename>do_c</filename> is dependent on
+                <filename>do_b</filename>, which in turn is dependent on
+                <filename>do_a</filename>.
+                Given this scenario, if you use <filename>deltask</filename>
+                to delete <filename>do_b</filename>, the implicit dependency
+                relationship between <filename>do_c</filename> and
+                <filename>do_a</filename> through <filename>do_b</filename>
+                no longer exists, and <filename>do_c</filename> dependencies
+                are not updated to include <filename>do_a</filename>.
+                Thus, <filename>do_c</filename> is free to run before
+                <filename>do_a</filename>.
+            </para>
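+
+            <para>
+                The following sketch, using illustrative task names whose
+                function bodies are assumed to be defined elsewhere, shows
+                the situation:
+                <literallayout class='monospaced'>
+     # Original ordering
+     addtask a
+     addtask b after do_a
+     addtask c after do_b
+
+     # After this, do_c no longer waits for do_a
+     deltask b
+                </literallayout>
+            </para>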
+
+            <para>
+                If you want dependencies such as these to remain intact, use
+                the <filename>noexec</filename> varflag to disable the task
+                instead of using the <filename>deltask</filename> command to
+                delete it:
+                <literallayout class='monospaced'>
+     do_b[noexec] = "1"
+                </literallayout>
+            </para>
+        </section>
+
+        <section id='passing-information-into-the-build-task-environment'>
+            <title>Passing Information Into the Build Task Environment</title>
+
+            <para>
+                When running a task, BitBake tightly controls the shell execution
+                environment of the build tasks to make
+                sure unwanted contamination from the build machine cannot
+                influence the build.
+                <note>
+                    By default, BitBake cleans the environment to include only those
+                    things exported or listed in its whitelist to ensure that the build
+                    environment is reproducible and consistent.
+                    You can prevent this "cleaning" by setting the
+                    <link linkend='var-BB_PRESERVE_ENV'><filename>BB_PRESERVE_ENV</filename></link>
+                    variable.
+                </note>
+                Consequently, if you do want something to get passed into the
+                build task environment, you must take these two steps:
+                <orderedlist>
+                    <listitem><para>
+                        Tell BitBake to load what you want from the environment
+                        into the datastore.
+                        You can do so through the
+                        <link linkend='var-BB_ENV_WHITELIST'><filename>BB_ENV_WHITELIST</filename></link>
+                        and
+                        <link linkend='var-BB_ENV_EXTRAWHITE'><filename>BB_ENV_EXTRAWHITE</filename></link>
+                        variables.
+                        For example, assume you want to keep the build system
+                        from using your <filename>$HOME/.ccache</filename>
+                        directory by pointing <filename>ccache</filename> at a
+                        different location through the
+                        <filename>CCACHE_DIR</filename> environment variable.
+                        The following command "whitelists" the environment
+                        variable <filename>CCACHE_DIR</filename>, causing
+                        BitBake to allow that variable into the datastore:
+                        <literallayout class='monospaced'>
+     export BB_ENV_EXTRAWHITE="$BB_ENV_EXTRAWHITE CCACHE_DIR"
+                        </literallayout></para></listitem>
+                    <listitem><para>
+                        Tell BitBake to export what you have loaded into the
+                        datastore to the task environment of every running task.
+                        Loading something from the environment into the datastore
+                        (previous step) only makes it available in the datastore.
+                        To export it to the task environment of every running task,
+                        use a command similar to the following in your local configuration
+                        file <filename>local.conf</filename> or your
+                        distribution configuration file:
+                        <literallayout class='monospaced'>
+     export CCACHE_DIR
+                        </literallayout>
+                        <note>
+                            A side effect of the previous steps is that BitBake
+                            records the variable as a dependency of the build process
+                            in things like the setscene checksums.
+                            If doing so results in unnecessary rebuilds of tasks, you can
+                            whitelist the variable so that the setscene code
+                            ignores the dependency when it creates checksums.
+                        </note></para></listitem>
+                </orderedlist>
+            </para>
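+
+            <para>
+                Putting both steps together, a sketch for passing the
+                <filename>CCACHE_DIR</filename> variable through might look
+                like the following.
+                The <filename>BB_HASHBASE_WHITELIST</filename> line is only
+                needed if you also want the setscene checksums to ignore the
+                variable, and the directory path is illustrative:
+                <literallayout class='monospaced'>
+     # In the external environment, before running BitBake
+     export CCACHE_DIR="/path/to/build-ccache"
+     export BB_ENV_EXTRAWHITE="$BB_ENV_EXTRAWHITE CCACHE_DIR"
+
+     # In local.conf or the distribution configuration file
+     export CCACHE_DIR
+     BB_HASHBASE_WHITELIST += "CCACHE_DIR"
+                </literallayout>
+            </para>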
+
+            <para>
+                Sometimes, it is useful to be able to obtain information
+                from the original execution environment.
+                BitBake saves a copy of the original environment into
+                a special variable named
+                <link linkend='var-BB_ORIGENV'><filename>BB_ORIGENV</filename></link>.
+            </para>
+
+            <para>
+                The <filename>BB_ORIGENV</filename> variable returns a datastore
+                object that can be queried using the standard datastore operators
+                such as <filename>getVar("X", False)</filename>.
+                The datastore object is useful, for example, to find the original
+                <filename>DISPLAY</filename> variable.
+                Here is an example:
+                <literallayout class='monospaced'>
+     origenv = d.getVar("BB_ORIGENV", False)
+     display = origenv.getVar("DISPLAY", False)
+                </literallayout>
+                The previous example returns the value of
+                <filename>DISPLAY</filename> from the original
+                execution environment.
+            </para>
+        </section>
+    </section>
+
+    <section id='variable-flags'>
+        <title>Variable Flags</title>
+
+        <para>
+            Variable flags (varflags) help control a task's functionality
+            and dependencies.
+            BitBake reads and writes varflags to the datastore using the following
+            command forms:
+            <literallayout class='monospaced'>
+     <replaceable>variable</replaceable> = d.getVarFlags("<replaceable>variable</replaceable>")
+     self.d.setVarFlags("FOO", {"func": True})
+            </literallayout>
+        </para>
+
+        <para>
+            When working with varflags, the same syntax, with the exception of
+            overrides, applies.
+            In other words, you can set, append, and prepend varflags just like
+            variables.
+            See the
+            "<link linkend='variable-flag-syntax'>Variable Flag Syntax</link>"
+            section for details.
+        </para>
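+
+        <para>
+            For example, the following lines, which use the
+            <link linkend='var-B'><filename>B</filename></link>
+            variable and an illustrative subdirectory, first set and then
+            append to the "dirs" varflag of a task:
+            <literallayout class='monospaced'>
+     do_compile[dirs] = "${B}"
+     do_compile[dirs] += "${B}/logs"
+            </literallayout>
+        </para>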
+
+        <para>
+            BitBake has a defined set of varflags available for recipes and
+            classes.
+            Tasks support a number of these flags which control various
+            functionality of the task:
+            <itemizedlist>
+                <listitem><para><emphasis>cleandirs:</emphasis>
+                    Empty directories that should be created before the task runs.
+                    </para></listitem>
+                <listitem><para><emphasis>depends:</emphasis>
+                    Controls inter-task dependencies.
+                    See the
+                    <link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
+                    variable and the
+                    "<link linkend='inter-task-dependencies'>Inter-Task Dependencies</link>"
+                    section for more information.
+                    </para></listitem>
+                <listitem><para><emphasis>deptask:</emphasis>
+                    Controls task build-time dependencies.
+                    See the
+                    <link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
+                    variable and the
+                    "<link linkend='build-dependencies'>Build Dependencies</link>"
+                    section for more information.
+                    </para></listitem>
+                <listitem><para><emphasis>dirs:</emphasis>
+                    Directories that should be created before the task runs.
+                    The last directory listed will be used as the work directory
+                    for the task.
+                    </para></listitem>
+                <listitem><para><emphasis>lockfiles:</emphasis>
+                     Specifies one or more lockfiles to lock while the task
+                     executes.
+                     Only one task may hold a lockfile, and any task that
+                     attempts to lock an already locked file will block until
+                     the lock is released.
+                     You can use this variable flag to accomplish mutual
+                     exclusion.
+                    </para></listitem>
+                <listitem><para><emphasis>noexec:</emphasis>
+                    Marks the task as being empty, with no execution required.
+                    The <filename>noexec</filename> flag can be used to set up
+                    tasks as dependency placeholders, or to disable tasks defined
+                    elsewhere that are not needed in a particular recipe.
+                    </para></listitem>
+                <listitem><para><emphasis>nostamp:</emphasis>
+                    Tells BitBake to not generate a stamp file for a task,
+                    which implies the task should always be executed.
+                    </para></listitem>
+                <listitem><para><emphasis>postfuncs:</emphasis>
+                    List of functions to call after the completion of the task.
+                    </para></listitem>
+                <listitem><para><emphasis>prefuncs:</emphasis>
+                    List of functions to call before the task executes.
+                    </para></listitem>
+                <listitem><para><emphasis>rdepends:</emphasis>
+                    Controls inter-task runtime dependencies.
+                    See the
+                    <link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link>
+                    variable, the
+                    <link linkend='var-RRECOMMENDS'><filename>RRECOMMENDS</filename></link>
+                    variable, and the
+                    "<link linkend='inter-task-dependencies'>Inter-Task Dependencies</link>"
+                    section for more information.
+                    </para></listitem>
+                <listitem><para><emphasis>rdeptask:</emphasis>
+                    Controls task runtime dependencies.
+                    See the
+                    <link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link>
+                    variable, the
+                    <link linkend='var-RRECOMMENDS'><filename>RRECOMMENDS</filename></link>
+                    variable, and the
+                    "<link linkend='runtime-dependencies'>Runtime Dependencies</link>"
+                    section for more information.
+                    </para></listitem>
+                <listitem><para><emphasis>recideptask:</emphasis>
+                    When set in conjunction with
+                    <filename>recrdeptask</filename>, specifies a task that
+                    should be inspected for additional dependencies.
+                    </para></listitem>
+                <listitem><para><emphasis>recrdeptask:</emphasis>
+                    Controls task recursive runtime dependencies.
+                    See the
+                    <link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link>
+                    variable, the
+                    <link linkend='var-RRECOMMENDS'><filename>RRECOMMENDS</filename></link>
+                    variable, and the
+                    "<link linkend='recursive-dependencies'>Recursive Dependencies</link>"
+                    section for more information.
+                    </para></listitem>
+                <listitem><para><emphasis>stamp-extra-info:</emphasis>
+                    Extra stamp information to append to the task's stamp.
+                    As an example, OpenEmbedded uses this flag to allow
+                    machine-specific tasks.
+                    </para></listitem>
+                <listitem><para><emphasis>umask:</emphasis>
+                    The umask to run the task under.
+                    </para></listitem>
+            </itemizedlist>
+        </para>
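+
+        <para>
+            As a short sketch, the following lines combine several of these
+            flags for a single task (the <filename>do_deploy</filename> task
+            name and the <filename>DEPLOYDIR</filename> variable are
+            illustrative):
+            <literallayout class='monospaced'>
+     do_deploy[dirs] = "${DEPLOYDIR}"
+     do_deploy[cleandirs] = "${DEPLOYDIR}"
+     do_deploy[umask] = "022"
+     do_deploy[lockfiles] = "${DEPLOYDIR}/deploy.lock"
+            </literallayout>
+        </para>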
+
+        <para>
+            Several varflags are useful for controlling how signatures are
+            calculated for variables.
+            For more information on this process, see the
+            "<link linkend='checksums'>Checksums (Signatures)</link>"
+            section.
+            <itemizedlist>
+                <listitem><para><emphasis>vardeps:</emphasis>
+                    Specifies a space-separated list of additional
+                    variables to add to a variable's dependencies
+                    for the purposes of calculating its signature.
+                    Adding variables to this list is useful, for example, when
+                    a function refers to a variable in a manner that
+                    does not allow BitBake to automatically determine
+                    that the variable is referred to.
+                    </para></listitem>
+                <listitem><para><emphasis>vardepsexclude:</emphasis>
+                    Specifies a space-separated list of variables
+                    that should be excluded from a variable's dependencies
+                    for the purposes of calculating its signature.
+                    </para></listitem>
+                <listitem><para><emphasis>vardepvalue:</emphasis>
+                    If set, instructs BitBake to ignore the actual
+                    value of the variable and instead use the specified
+                    value when calculating the variable's signature.
+                    </para></listitem>
+                <listitem><para><emphasis>vardepvalueexclude:</emphasis>
+                    Specifies a pipe-separated list of strings to exclude
+                    from the variable's value when calculating the
+                    variable's signature.
+                    </para></listitem>
+            </itemizedlist>
+        </para>
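+
+        <para>
+            For example, the following lines, in which the variable names
+            are illustrative, add an extra dependency to and exclude a
+            variable from the signature of the
+            <filename>do_compile</filename> task:
+            <literallayout class='monospaced'>
+     do_compile[vardeps] += "EXTRA_BUILD_FLAGS"
+     do_compile[vardepsexclude] = "BUILD_TIMESTAMP"
+            </literallayout>
+        </para>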
+    </section>
+
+    <section id='events'>
+        <title>Events</title>
+
+        <para>
+            BitBake allows installation of event handlers within
+            recipe and class files.
+            Events are triggered at certain points during operation,
+            such as the beginning of an operation against a given recipe
+            (<filename>*.bb</filename> file), the start of a given task,
+            task failure, task success, and so forth.
+            The intent is to make it easy to do things like email
+            notification on build failure.
+        </para>
+
+        <para>
+            Following is an example event handler that
+            prints the name of the event and the content of
+            the <filename>FILE</filename> variable:
+            <literallayout class='monospaced'>
+     addhandler myclass_eventhandler
+     python myclass_eventhandler() {
+         from bb.event import getName
+         from bb import data
+         print("The name of the Event is %s" % getName(e))
+         print("The file we run for is %s" % data.getVar('FILE', e.data, True))
+     }
+            </literallayout>
+            This event handler gets called every time an event is
+            triggered.
+            A global variable "<filename>e</filename>" is defined and
+            "<filename>e.data</filename>" contains an instance of
+            "<filename>bb.data</filename>".
+            With the <filename>getName(e)</filename> method, one can get
+            the name of the triggered event.
+        </para>
+
+        <para>
+            Because you probably are only interested in a subset of events,
+            you would likely use the <filename>[eventmask]</filename> flag
+            for your event handler to be sure that only certain events
+            trigger the handler.
+            Given the previous example, suppose you only wanted the
+            <filename>bb.build.TaskFailed</filename> event to trigger that
+            event handler.
+            Use the flag as follows:
+            <literallayout class='monospaced'>
+     addhandler myclass_eventhandler
+     myclass_eventhandler[eventmask] = "bb.build.TaskFailed"
+     python myclass_eventhandler() {
+         from bb.event import getName
+         from bb import data
+         print("The name of the Event is %s" % getName(e))
+         print("The file we run for is %s" % data.getVar('FILE', e.data, True))
+     }
+            </literallayout>
+        </para>
+
+        <para>
+            During a standard build, the following common events might occur:
+            <itemizedlist>
+                <listitem><para>
+                    <filename>bb.event.ConfigParsed()</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>bb.event.ParseStarted()</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>bb.event.ParseProgress()</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>bb.event.ParseCompleted()</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>bb.event.BuildStarted()</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>bb.build.TaskStarted()</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>bb.build.TaskInvalid()</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>bb.build.TaskFailedSilent()</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>bb.build.TaskFailed()</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>bb.build.TaskSucceeded()</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>bb.event.BuildCompleted()</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>bb.cooker.CookerExit()</filename>
+                    </para></listitem>
+            </itemizedlist>
+            Here is a list of other events that occur based on specific requests
+            to the server:
+            <itemizedlist>
+                <listitem><para>
+                    <filename>bb.event.TreeDataPreparationStarted()</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>bb.event.TreeDataPreparationProgress</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>bb.event.TreeDataPreparationCompleted</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>bb.event.DepTreeGenerated</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>bb.event.CoreBaseFilesFound</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>bb.event.ConfigFilePathFound</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>bb.event.FilesMatchingFound</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>bb.event.ConfigFilesFound</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>bb.event.TargetsTreeGenerated</filename>
+                    </para></listitem>
+            </itemizedlist>
+        </para>
+    </section>
+
+    <section id='variants-class-extension-mechanism'>
+        <title>Variants - Class Extension Mechanism</title>
+
+        <para>
+            BitBake supports two features that facilitate creating,
+            from a single recipe file, multiple incarnations of that
+            recipe where all incarnations are buildable.
+            These features are enabled through the
+            <link linkend='var-BBCLASSEXTEND'><filename>BBCLASSEXTEND</filename></link>
+            and
+            <link linkend='var-BBVERSIONS'><filename>BBVERSIONS</filename></link>
+            variables.
+            <note>
+                The mechanism for this class extension is extremely
+                specific to the implementation.
+                Usually, the recipe's
+                <link linkend='var-PROVIDES'><filename>PROVIDES</filename></link>,
+                <link linkend='var-PN'><filename>PN</filename></link>, and
+                <link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
+                variables would need to be modified by the extension class.
+                For specific examples, see the OE-Core
+                <filename>native</filename>, <filename>nativesdk</filename>,
+                and <filename>multilib</filename> classes.
+            </note>
+            <itemizedlist>
+                <listitem><para><emphasis><filename>BBCLASSEXTEND</filename>:</emphasis>
+                    This variable is a space-separated list of classes used to "extend" the
+                    recipe for each variant.
+                    Here is an example that results in a second incarnation of the current
+                    recipe being available.
+                    This second incarnation will have the "native" class inherited.
+                    <literallayout class='monospaced'>
+     BBCLASSEXTEND = "native"
+                    </literallayout></para></listitem>
+                <listitem><para><emphasis><filename>BBVERSIONS</filename>:</emphasis>
+                    This variable allows a single recipe file to build
+                    multiple versions of a project.
+                    You can also specify conditional metadata
+                    (using the
+                    <link linkend='var-OVERRIDES'><filename>OVERRIDES</filename></link>
+                    mechanism) for a single version, or an optionally named range of versions.
+                    Here is an example:
+                    <literallayout class='monospaced'>
+     BBVERSIONS = "1.0 2.0 git"
+     SRC_URI_git = "git://someurl/somepath.git"
+
+     BBVERSIONS = "1.0.[0-6]:1.0.0+ \ 1.0.[7-9]:1.0.7+"
+     SRC_URI_append_1.0.7+ = "file://some_patch_which_the_new_versions_need.patch;patch=1"
+                    </literallayout>
+                    The name of the range defaults to the original version of the
+                    recipe.
+                    For example, in OpenEmbedded, the recipe file
+                    <filename>foo_1.0.0+.bb</filename> creates a default name range
+                    of <filename>1.0.0+</filename>.
+                    This is useful because the range name is not only placed
+                    into overrides, but it is also made available for the metadata to use
+                    in the variable that defines the base recipe versions for use in
+                    <filename>file://</filename> search paths
+                    (<link linkend='var-FILESPATH'><filename>FILESPATH</filename></link>).
+                    </para></listitem>
+            </itemizedlist>
+        </para>
+    </section>
+
+    <section id='dependencies'>
+        <title>Dependencies</title>
+
+        <para>
+            To allow for efficient operation given multiple processes
+            executing in parallel, BitBake handles dependencies at
+            the task level.
+            BitBake supports a robust method to handle these dependencies.
+        </para>
+
+        <para>
+            This section describes several types of dependency mechanisms.
+        </para>
+
+        <section id='dependencies-internal-to-the-bb-file'>
+            <title>Dependencies Internal to the <filename>.bb</filename> File</title>
+
+            <para>
+                BitBake uses the <filename>addtask</filename> directive
+                to manage dependencies that are internal to a given recipe
+                file.
+                You can use the <filename>addtask</filename> directive to
+                indicate when a task is dependent on other tasks or when
+                other tasks depend on that task.
+                Here is an example:
+                <literallayout class='monospaced'>
+     addtask printdate after do_fetch before do_build
+                </literallayout>
+                In this example, the <filename>do_printdate</filename> task
+                depends on the completion of the <filename>do_fetch</filename>
+                task.
+                And, the <filename>do_build</filename> task depends on the
+                completion of the <filename>do_printdate</filename> task.
+            </para>
+        </section>
+
+        <section id='build-dependencies'>
+            <title>Build Dependencies</title>
+
+            <para>
+                BitBake uses the
+                <link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
+                variable to manage build time dependencies.
+                The "deptask" varflag for tasks signifies the task of each
+                item listed in <filename>DEPENDS</filename> that must
+                complete before that task can be executed.
+                Here is an example:
+                <literallayout class='monospaced'>
+     do_configure[deptask] = "do_populate_sysroot"
+                </literallayout>
+                In this example, the <filename>do_populate_sysroot</filename>
+                task of each item in <filename>DEPENDS</filename> must complete before
+                <filename>do_configure</filename> can execute.
+            </para>
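+
+            <para>
+                For the flag to have an effect, the recipe also needs to
+                list something in <filename>DEPENDS</filename>.
+                A sketch with an illustrative dependency follows:
+                <literallayout class='monospaced'>
+     DEPENDS = "libfoo"
+     do_configure[deptask] = "do_populate_sysroot"
+
+     # do_configure of this recipe now waits for the
+     # do_populate_sysroot task of libfoo
+                </literallayout>
+            </para>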
+        </section>
+
+        <section id='runtime-dependencies'>
+            <title>Runtime Dependencies</title>
+
+            <para>
+                BitBake uses the
+                <link linkend='var-PACKAGES'><filename>PACKAGES</filename></link>,
+                <link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link>, and
+                <link linkend='var-RRECOMMENDS'><filename>RRECOMMENDS</filename></link>
+                variables to manage runtime dependencies.
+            </para>
+
+            <para>
+                The <filename>PACKAGES</filename> variable lists runtime
+                packages.
+                Each of those packages can have <filename>RDEPENDS</filename> and
+                <filename>RRECOMMENDS</filename> runtime dependencies.
+                The "rdeptask" flag for tasks is used to signify the task of each
+                item runtime dependency which must have completed before that
+                task can be executed.
+                <literallayout class='monospaced'>
+     do_package_qa[rdeptask] = "do_packagedata"
+                </literallayout>
+                In the previous example, the <filename>do_packagedata</filename>
+                task of each item in <filename>RDEPENDS</filename> must have
+                completed before <filename>do_package_qa</filename> can execute.
+            </para>
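+
+            <para>
+                A sketch with illustrative package names follows:
+                <literallayout class='monospaced'>
+     PACKAGES = "${PN}"
+     RDEPENDS_${PN} = "bar"
+
+     do_package_qa[rdeptask] = "do_packagedata"
+
+     # do_package_qa of this recipe now waits for the
+     # do_packagedata task of the recipe providing "bar"
+                </literallayout>
+            </para>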
+        </section>
+
+        <section id='recursive-dependencies'>
+            <title>Recursive Dependencies</title>
+
+            <para>
+                BitBake uses the "recrdeptask" flag to manage
+                recursive task dependencies.
+                BitBake looks through the build-time and runtime
+                dependencies of the current recipe, looks through
+                the task's inter-task
+                dependencies, and then adds dependencies for the
+                listed task.
+                Once BitBake has accomplished this, it recursively works through
+                the dependencies of those tasks.
+                Iterative passes continue until all dependencies are discovered
+                and added.
+            </para>
+
+            <para>
+                You might want to not only have BitBake look for
+                dependencies of those tasks, but also have BitBake look
+                for build-time and runtime dependencies of the dependent
+                tasks as well.
+                If that is the case, you need to reference the task name
+                itself in the task list:
+                <literallayout class='monospaced'>
+     do_a[recrdeptask] = "do_a do_b"
+                </literallayout>
+            </para>
+        </section>
+
+        <section id='inter-task-dependencies'>
+            <title>Inter-Task Dependencies</title>
+
+            <para>
+                BitBake uses the "depends" flag in a more generic form
+                to manage inter-task dependencies.
+                This more generic form allows for inter-dependency
+                checks for specific tasks rather than checks for
+                the data in <filename>DEPENDS</filename>.
+                Here is an example:
+                <literallayout class='monospaced'>
+     do_patch[depends] = "quilt-native:do_populate_sysroot"
+                </literallayout>
+                In this example, the <filename>do_populate_sysroot</filename>
+                task of the target <filename>quilt-native</filename>
+                must have completed before the
+                <filename>do_patch</filename> task can execute.
+            </para>
+
+            <para>
+                The "rdepends" flag works in a similar way but takes targets
+                in the runtime namespace instead of the build-time dependency
+                namespace.
+            </para>
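+
+            <para>
+                A sketch of the syntax, using an illustrative task and
+                runtime package name, follows:
+                <literallayout class='monospaced'>
+     do_example[rdepends] = "somepackage:do_something"
+                </literallayout>
+            </para>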
+        </section>
+    </section>
+
+    <section id='accessing-datastore-variables-using-python'>
+        <title>Accessing Datastore Variables Using Python</title>
+
+        <para>
+            It is often necessary to access variables in the
+            BitBake datastore using Python functions.
+            The BitBake datastore has an API that gives you this
+            access.
+            Here is a list of available operations:
+        </para>
+
+        <para>
+            <informaltable frame='none'>
+                <tgroup cols='2' align='left' colsep='1' rowsep='1'>
+                    <colspec colname='c1' colwidth='1*'/>
+                    <colspec colname='c2' colwidth='1*'/>
+                    <thead>
+                        <row>
+                            <entry align="left"><emphasis>Operation</emphasis></entry>
+                            <entry align="left"><emphasis>Description</emphasis></entry>
+                        </row>
+                    </thead>
+                    <tbody>
+                        <row>
+                            <entry align="left"><filename>d.getVar("X", expand=False)</filename></entry>
+                            <entry align="left">Returns the value of variable "X".
+                                Using "expand=True" expands the value.</entry>
+                        </row>
+                        <row>
+                            <entry align="left"><filename>d.setVar("X", "value")</filename></entry>
+                            <entry align="left">Sets the variable "X" to "value".</entry>
+                        </row>
+                        <row>
+                            <entry align="left"><filename>d.appendVar("X", "value")</filename></entry>
+                            <entry align="left">Adds "value" to the end of the variable "X".</entry>
+                        </row>
+                        <row>
+                            <entry align="left"><filename>d.prependVar("X", "value")</filename></entry>
+                            <entry align="left">Adds "value" to the start of the variable "X".</entry>
+                        </row>
+                        <row>
+                            <entry align="left"><filename>d.delVar("X")</filename></entry>
+                            <entry align="left">Deletes the variable "X" from the datastore.</entry>
+                        </row>
+                        <row>
+                            <entry align="left"><filename>d.renameVar("X", "Y")</filename></entry>
+                            <entry align="left">Renames the variable "X" to "Y".</entry>
+                        </row>
+                        <row>
+                            <entry align="left"><filename>d.getVarFlag("X", flag, expand=False)</filename></entry>
+                            <entry align="left">Gets then named flag from the variable "X".
+                                Using "expand=True" expands the named flag.</entry>
+                        </row>
+                        <row>
+                            <entry align="left"><filename>d.setVarFlag("X", flag, "value")</filename></entry>
+                            <entry align="left">Sets the named flag for variable "X" to "value".</entry>
+                        </row>
+                        <row>
+                            <entry align="left"><filename>d.appendVarFlag("X", flag, "value")</filename></entry>
+                            <entry align="left">Appends "value" to the named flag on the
+                            variable "X".</entry>
+                        </row>
+                        <row>
+                            <entry align="left"><filename>d.prependVarFlag("X", flag, "value")</filename></entry>
+                            <entry align="left">Prepends "value" to the named flag on
+                               the variable "X".</entry>
+                        </row>
+                        <row>
+                            <entry align="left"><filename>d.delVarFlag("X", flag)</filename></entry>
+                            <entry align="left">Deletes the named flag on the variable
+                                "X" from the datastore.</entry>
+                        </row>
+                        <row>
+                            <entry align="left"><filename>d.setVarFlags("X", flagsdict)</filename></entry>
+                            <entry align="left">Sets the flags specified in
+                                the <filename>flagsdict</filename> parameter.
+                                <filename>setVarFlags</filename> does not clear previous flags.
+                                Think of this operation as <filename>addVarFlags</filename>.</entry>
+                        </row>
+                        <row>
+                            <entry align="left"><filename>d.getVarFlags("X")</filename></entry>
+                            <entry align="left">Returns a <filename>flagsdict</filename> of the flags for
+                                the variable "X".</entry>
+                        </row>
+                        <row>
+                            <entry align="left"><filename>d.delVarFlags("X")</filename></entry>
+                            <entry align="left">Deletes all the flags for the variable "X".</entry>
+                        </row>
+                        <row>
+                            <entry align="left"><filename>d.expand(expression)</filename></entry>
+                            <entry align="left">Expands variable references in the specified string expression.</entry>
+                        </row>
+                    </tbody>
+                </tgroup>
+            </informaltable>
+        </para>
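+
+        <para>
+            As a short sketch, the following Python task uses several of
+            these operations (the task and variable names are illustrative):
+            <literallayout class='monospaced'>
+     python do_show_note () {
+         # Read one variable, modify another, and expand a string
+         builddir = d.getVar("B", True)
+         d.setVar("MY_NOTE", "building in %s" % builddir)
+         d.appendVar("MY_NOTE", " (example)")
+         bb.note(d.expand("MY_NOTE is now: ${MY_NOTE}"))
+     }
+     addtask show_note before do_build
+            </literallayout>
+        </para>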
+    </section>
+
+    <section id='task-checksums-and-setscene'>
+        <title>Task Checksums and Setscene</title>
+
+        <para>
+            BitBake uses checksums (or signatures) along with the setscene
+            mechanism to determine if a task needs to be run.
+            This section describes the process.
+            To help understand how BitBake does this, the section assumes an
+            OpenEmbedded metadata-based example.
+        </para>
+
+        <para>
+            This list is a placeholder for content that existed from previous
+            work on the manual.
+            Some or all of it probably needs to be integrated into the
+            subsections that make up this section.
+            For now, I have just provided a short glossary-like description
+            for each variable.
+            Ultimately, this list goes away.
+            <itemizedlist>
+                <listitem><para><filename>STAMP</filename>:
+                    The base path to create stamp files.</para></listitem>
+                <listitem><para><filename>STAMPCLEAN</filename>:
+                    Again, the base path used to create stamp files, but one
+                    that can contain wildcards for matching a range of files
+                    for clean operations.
+                    </para></listitem>
+                <listitem><para><filename>BB_STAMP_WHITELIST</filename>:
+                    Lists stamp files that are looked at when the stamp policy
+                    is "whitelist".
+                    </para></listitem>
+                <listitem><para><filename>BB_STAMP_POLICY</filename>:
+                    Defines the mode for comparing timestamps of stamp files.
+                    </para></listitem>
+                <listitem><para><filename>BB_HASHCHECK_FUNCTION</filename>:
+                    Specifies the name of the function to call during
+                    the "setscene" part of the task's execution in order
+                    to validate the list of task hashes.
+                    </para></listitem>
+                <listitem><para><filename>BB_SETSCENE_VERIFY_FUNCTION</filename>:
+                    Specifies a function to call that verifies the list of
+                    planned task execution before the main task execution
+                    happens.
+                    </para></listitem>
+                <listitem><para><filename>BB_SETSCENE_DEPVALID</filename>:
+                    Specifies a function BitBake calls that determines
+                    whether BitBake requires a setscene dependency to
+                    be met.
+                    </para></listitem>
+                <listitem><para><filename>BB_TASKHASH</filename>:
+                    Within an executing task, this variable holds the hash
+                    of the task as returned by the currently enabled
+                    signature generator.
+                    </para></listitem>
+            </itemizedlist>
+        </para>
+    </section>
+</chapter>

+ 2319 - 0
bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.xml

@@ -0,0 +1,2319 @@
+<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
+"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd"
+[<!ENTITY % poky SYSTEM "../poky.ent"> %poky; ] >
+
+<!-- Dummy chapter -->
+<chapter id='ref-variables-glos'>
+
+<title>Variables Glossary</title>
+
+<para>
+    This chapter lists common variables used by BitBake and gives an overview
+    of their function and contents.
+</para>
+
+<note>
+    Following are some points regarding the variables listed in this glossary:
+    <itemizedlist>
+        <listitem><para>The variables listed in this glossary
+            are specific to BitBake.
+            Consequently, the descriptions are limited to that context.
+            </para></listitem>
+        <listitem><para>Also, variables exist in other systems that use BitBake
+            (e.g. the Yocto Project and OpenEmbedded) that have names identical
+            to those found in this glossary.
+            For such cases, the variables in those systems extend the
+            functionality of the variable as it is described here in
+            this glossary.
+            </para></listitem>
+        <listitem><para>Finally, some variables mentioned in the descriptions
+            in this glossary do not themselves appear in the BitBake glossary.
+            These other variables are variables used in systems that use
+            BitBake.
+            </para></listitem>
+    </itemizedlist>
+</note>
+
+<glossary id='ref-variables-glossary'>
+
+    <para>
+       <link linkend='var-ASSUME_PROVIDED'>A</link>
+       <link linkend='var-B'>B</link>
+       <link linkend='var-CACHE'>C</link>
+       <link linkend='var-DEFAULT_PREFERENCE'>D</link>
+       <link linkend='var-EXCLUDE_FROM_WORLD'>E</link>
+       <link linkend='var-FAKEROOT'>F</link>
+       <link linkend='var-GITDIR'>G</link>
+       <link linkend='var-HGDIR'>H</link>
+<!--       <link linkend='var-ICECC_DISABLED'>I</link> -->
+<!--               <link linkend='var-glossary-j'>J</link> -->
+<!--       <link linkend='var-KARCH'>K</link> -->
+       <link linkend='var-LAYERDEPENDS'>L</link>
+       <link linkend='var-MIRRORS'>M</link>
+<!--               <link linkend='var-glossary-n'>N</link> -->
+       <link linkend='var-OVERRIDES'>O</link>
+       <link linkend='var-PACKAGES'>P</link>
+<!--       <link linkend='var-QMAKE_PROFILES'>Q</link> -->
+       <link linkend='var-RDEPENDS'>R</link>
+       <link linkend='var-SECTION'>S</link>
+       <link linkend='var-T'>T</link>
+<!--       <link linkend='var-UBOOT_CONFIG'>U</link> -->
+<!--               <link linkend='var-glossary-v'>V</link> -->
+<!--       <link linkend='var-WARN_QA'>W</link> -->
+<!--               <link linkend='var-glossary-x'>X</link> -->
+<!--               <link linkend='var-glossary-y'>Y</link> -->
+<!--               <link linkend='var-glossary-z'>Z</link>-->
+    </para>
+
+    <glossdiv id='var-glossary-a'><title>A</title>
+
+        <glossentry id='var-ASSUME_PROVIDED'><glossterm>ASSUME_PROVIDED</glossterm>
+            <glossdef>
+                <para>
+                    Lists recipe names
+                    (<link linkend='var-PN'><filename>PN</filename></link>
+                    values) BitBake does not attempt to build.
+                    Instead, BitBake assumes these recipes have already been
+                    built.
+                </para>
+
+                <para>
+                    In OpenEmbedded Core, <filename>ASSUME_PROVIDED</filename>
+                    mostly specifies native tools that should not be built.
+                    An example is <filename>git-native</filename>, which,
+                    when specified, allows the Git binary from the host to
+                    be used rather than building
+                    <filename>git-native</filename>.
+                </para>
+            </glossdef>
+        </glossentry>
+
+    </glossdiv>
+
+
+    <glossdiv id='var-glossary-b'><title>B</title>
+
+        <glossentry id='var-B'><glossterm>B</glossterm>
+            <glossdef>
+                <para>
+                    The directory in which BitBake executes functions
+                    during a recipe's build process.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_ALLOWED_NETWORKS'><glossterm>BB_ALLOWED_NETWORKS</glossterm>
+            <glossdef>
+                <para>
+                    Specifies a space-delimited list of hosts that the fetcher
+                    is allowed to use to obtain the required source code.
+                    Following are considerations surrounding this variable:
+                    <itemizedlist>
+                        <listitem><para>
+                            This host list is only used if
+                            <link linkend='var-BB_NO_NETWORK'><filename>BB_NO_NETWORK</filename></link>
+                            is either not set or set to "0".
+                            </para></listitem>
+                        <listitem><para>
+                            Limited support for wildcard matching against the
+                            beginning of host names exists.
+                            For example, the following setting matches
+                            <filename>git.gnu.org</filename>,
+                            <filename>ftp.gnu.org</filename>, and
+                            <filename>foo.git.gnu.org</filename>.
+                            <literallayout class='monospaced'>
+     BB_ALLOWED_NETWORKS = "*.gnu.org"
+                            </literallayout>
+                            </para></listitem>
+                        <listitem><para>
+                            Mirrors not in the host list are skipped and
+                            logged in debug.
+                            </para></listitem>
+                        <listitem><para>
+                            Attempts to access networks not in the host list
+                            cause a failure.
+                            </para></listitem>
+                    </itemizedlist>
+                    Using <filename>BB_ALLOWED_NETWORKS</filename> in
+                    conjunction with
+                    <link linkend='var-PREMIRRORS'><filename>PREMIRRORS</filename></link>
+                    is very useful.
+                    Adding the host you want to use to
+                    <filename>PREMIRRORS</filename> results in the source code
+                    being fetched from an allowed location and avoids raising
+                    an error when a host that is not allowed is in a
+                    <link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>
+                    statement.
+                    This is because the fetcher does not attempt to use the
+                    host listed in <filename>SRC_URI</filename> after a
+                    successful fetch from the
+                    <filename>PREMIRRORS</filename> occurs.
+                </para>
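+
+                <para>
+                    For example, the following sketch (the mirror URL is
+                    illustrative) restricts fetching to a single host and
+                    points the fetcher at a premirror on that host:
+                    <literallayout class='monospaced'>
+     BB_ALLOWED_NETWORKS = "downloads.yoctoproject.org"
+     PREMIRRORS = "git://.*/.* http://downloads.yoctoproject.org/mirror/sources/"
+                    </literallayout>
+                </para>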
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_CONSOLELOG'><glossterm>BB_CONSOLELOG</glossterm>
+            <glossdef>
+                <para>
+                    Specifies the path to a log file into which BitBake's user
+                    interface writes output during the build.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_CURRENTTASK'><glossterm>BB_CURRENTTASK</glossterm>
+            <glossdef>
+                <para>
+                    Contains the name of the currently running task.
+                    The name does not include the
+                    <filename>do_</filename> prefix.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_DANGLINGAPPENDS_WARNONLY'><glossterm>BB_DANGLINGAPPENDS_WARNONLY</glossterm>
+            <glossdef>
+                <para>
+                    Defines how BitBake handles situations where an append
+                    file (<filename>.bbappend</filename>) has no
+                    corresponding recipe file (<filename>.bb</filename>).
+                    This condition often occurs when layers get out of sync
+                    (e.g. <filename>oe-core</filename> bumps a
+                    recipe version and the old recipe no longer exists and the
+                    other layer has not been updated to the new version
+                    of the recipe yet).
+                </para>
+
+                <para>
+                    By default, this condition causes a fatal error.
+                    Setting this variable to "1" turns the error into a
+                    warning instead.
+                    The default fatal behavior is safest because it is
+                    the sane reaction given something is out of sync.
+                    It is important to realize when your changes are no longer
+                    being applied.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_DEFAULT_TASK'><glossterm>BB_DEFAULT_TASK</glossterm>
+            <glossdef>
+                <para>
+                    The default task to use when none is specified (e.g.
+                    with the <filename>-c</filename> command line option).
+                    The task name specified should not include the
+                    <filename>do_</filename> prefix.
+                </para>
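+
+                <para>
+                    For example, the following assignment, commonly made in a
+                    configuration or class file, makes "build" the default
+                    task:
+                    <literallayout class='monospaced'>
+     BB_DEFAULT_TASK ?= "build"
+                    </literallayout>
+                </para>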
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_DISKMON_DIRS'><glossterm>BB_DISKMON_DIRS</glossterm>
+            <glossdef>
+                <para>
+                    Monitors disk space and available inodes during the build
+                    and allows you to control the build based on these
+                    parameters.
+                </para>
+
+                <para>
+                    Disk space monitoring is disabled by default.
+                    When setting this variable, use the following form:
+                    <literallayout class='monospaced'>
+     BB_DISKMON_DIRS = "&lt;action&gt;,&lt;dir&gt;,&lt;threshold&gt; [...]"
+
+     where:
+
+        &lt;action&gt; is:
+           ABORT:     Immediately abort the build when
+                      a threshold is broken.
+           STOPTASKS: Stop the build after the currently
+                      executing tasks have finished when
+                      a threshold is broken.
+           WARN:      Issue a warning but continue the
+                      build when a threshold is broken.
+                      Subsequent warnings are issued as
+                      defined by the
+                      <link linkend='var-BB_DISKMON_WARNINTERVAL'>BB_DISKMON_WARNINTERVAL</link> variable,
+                      which must be defined.
+
+        &lt;dir&gt; is:
+           Any directory you choose. You can specify one or
+           more directories to monitor by separating the
+           groupings with a space.  If two directories are
+           on the same device, only the first directory
+           is monitored.
+
+        &lt;threshold&gt; is:
+           Either the minimum available disk space,
+           the minimum number of free inodes, or
+           both.  You must specify at least one.  To
+           omit one or the other, simply omit the value.
+           Specify the threshold using G, M, K for Gbytes,
+           Mbytes, and Kbytes, respectively. If you do
+           not specify G, M, or K, Kbytes is assumed by
+           default.  Do not use GB, MB, or KB.
+                    </literallayout>
+                </para>
+
+                <para>
+                    Here are some examples:
+                    <literallayout class='monospaced'>
+     BB_DISKMON_DIRS = "ABORT,${TMPDIR},1G,100K WARN,${SSTATE_DIR},1G,100K"
+     BB_DISKMON_DIRS = "STOPTASKS,${TMPDIR},1G"
+     BB_DISKMON_DIRS = "ABORT,${TMPDIR},,100K"
+                    </literallayout>
+                    The first example works only if you also set
+                    the <link linkend='var-BB_DISKMON_WARNINTERVAL'><filename>BB_DISKMON_WARNINTERVAL</filename></link> variable.
+                    This example causes the build system to immediately
+                    abort when either the disk space in <filename>${TMPDIR}</filename> drops
+                    below 1 Gbyte or the available free inodes drops below
+                    100 Kbytes.
+                    Because two directories are provided with the variable, the
+                    build system also issues a
+                    warning when the disk space in the
+                    <filename>${SSTATE_DIR}</filename> directory drops
+                    below 1 Gbyte or the number of free inodes drops
+                    below 100 Kbytes.
+                    Subsequent warnings are issued during intervals as
+                    defined by the <filename>BB_DISKMON_WARNINTERVAL</filename>
+                    variable.
+                </para>
+
+                <para>
+                    The second example stops the build after all currently
+                    executing tasks complete when the minimum disk space
+                    in the <filename>${TMPDIR}</filename>
+                    directory drops below 1 Gbyte.
+                    No disk monitoring occurs for the free inodes in this case.
+                </para>
+
+                <para>
+                    The final example immediately aborts the build when the
+                    number of free inodes in the <filename>${TMPDIR}</filename> directory
+                    drops below 100 Kbytes.
+                    No disk space monitoring for the directory itself occurs
+                    in this case.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_DISKMON_WARNINTERVAL'><glossterm>BB_DISKMON_WARNINTERVAL</glossterm>
+            <glossdef>
+                <para>
+                    Defines the disk space and free inode warning intervals.
+                </para>
+
+                <para>
+                    If you are going to use the
+                    <filename>BB_DISKMON_WARNINTERVAL</filename> variable, you must
+                    also use the
+                    <link linkend='var-BB_DISKMON_DIRS'><filename>BB_DISKMON_DIRS</filename></link> variable
+                    and define its action as "WARN".
+                    During the build, subsequent warnings are issued each time
+                    disk space or number of free inodes further reduces by
+                    the respective interval.
+                </para>
+
+                <para>
+                    If you do not provide a <filename>BB_DISKMON_WARNINTERVAL</filename>
+                    variable and you do use <filename>BB_DISKMON_DIRS</filename> with
+                    the "WARN" action, the disk monitoring interval defaults to
+                    the following:
+                    <literallayout class='monospaced'>
+     BB_DISKMON_WARNINTERVAL = "50M,5K"
+                    </literallayout>
+                </para>
+
+                <para>
+                    When specifying the variable in your configuration file,
+                    use the following form:
+                    <literallayout class='monospaced'>
+     BB_DISKMON_WARNINTERVAL = "&lt;disk_space_interval&gt;,&lt;disk_inode_interval&gt;"
+
+     where:
+
+        &lt;disk_space_interval&gt; is:
+           An interval of disk space expressed in either
+           G, M, or K for Gbytes, Mbytes, or Kbytes,
+           respectively. You cannot use GB, MB, or KB.
+
+        &lt;disk_inode_interval&gt; is:
+           An interval of free inodes expressed in either
+           G, M, or K for Gbytes, Mbytes, or Kbytes,
+           respectively. You cannot use GB, MB, or KB.
+                    </literallayout>
+                </para>
+
+                <para>
+                    Here is an example:
+                    <literallayout class='monospaced'>
+     BB_DISKMON_DIRS = "WARN,${SSTATE_DIR},1G,100K"
+     BB_DISKMON_WARNINTERVAL = "50M,5K"
+                    </literallayout>
+                    These variables cause BitBake to
+                    issue subsequent warnings each time the available
+                    disk space further reduces by 50 Mbytes or the number
+                    of free inodes further reduces by 5 Kbytes in the
+                    <filename>${SSTATE_DIR}</filename> directory.
+                    Subsequent warnings based on the interval occur each time
+                    a respective interval is reached beyond the initial warning
+                    (i.e. 1 Gbyte of disk space and 100K free inodes).
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_ENV_WHITELIST'><glossterm>BB_ENV_WHITELIST</glossterm>
+            <glossdef>
+                <para>
+                    Specifies the internal whitelist of variables to allow
+                    through from the external environment into BitBake's
+                    datastore.
+                    If the value of this variable is not specified
+                    (which is the default), the following list is used:
+                    <link linkend='var-BBPATH'><filename>BBPATH</filename></link>,
+                    <link linkend='var-BB_PRESERVE_ENV'><filename>BB_PRESERVE_ENV</filename></link>,
+                    <link linkend='var-BB_ENV_WHITELIST'><filename>BB_ENV_WHITELIST</filename></link>,
+                    and
+                    <link linkend='var-BB_ENV_EXTRAWHITE'><filename>BB_ENV_EXTRAWHITE</filename></link>.
+                    <note>
+                        You must set this variable in the external environment
+                        in order for it to work.
+                    </note>
+                </para>
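+
+                <para>
+                    As an illustrative sketch only, you might replace the
+                    internal whitelist from the external environment before
+                    starting BitBake (the variable names shown here are
+                    examples, not required values):
+                    <literallayout class='monospaced'>
+     $ export BB_ENV_WHITELIST="BBPATH HTTP_PROXY http_proxy"
+     $ bitbake <replaceable>target</replaceable>
+                    </literallayout>
+                </para>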
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_ENV_EXTRAWHITE'><glossterm>BB_ENV_EXTRAWHITE</glossterm>
+            <glossdef>
+                <para>
+                    Specifies an additional set of variables to allow through
+                    (whitelist) from the external environment into BitBake's
+                    datastore.
+                    This list of variables is in addition to the internal
+                    list set in
+                    <link linkend='var-BB_ENV_WHITELIST'><filename>BB_ENV_WHITELIST</filename></link>.
+                    <note>
+                        You must set this variable in the external
+                        environment in order for it to work.
+                    </note>
+                </para>
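+
+                <para>
+                    A minimal sketch, assuming you want to pass proxy
+                    settings through from your shell (the variable names are
+                    examples only):
+                    <literallayout class='monospaced'>
+     $ export BB_ENV_EXTRAWHITE="HTTP_PROXY http_proxy"
+     $ bitbake <replaceable>target</replaceable>
+                    </literallayout>
+                </para>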
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_FETCH_PREMIRRORONLY'><glossterm>BB_FETCH_PREMIRRORONLY</glossterm>
+            <glossdef>
+                <para>
+                    When set to "1", causes BitBake's fetcher module to only
+                    search
+                    <link linkend='var-PREMIRRORS'><filename>PREMIRRORS</filename></link>
+                    for files.
+                    BitBake will not search the main
+                    <link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>
+                    or
+                    <link linkend='var-MIRRORS'><filename>MIRRORS</filename></link>.
+                </para>
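+
+                <para>
+                    To restrict fetching to your pre-mirrors, set the
+                    variable in a configuration file:
+                    <literallayout class='monospaced'>
+     BB_FETCH_PREMIRRORONLY = "1"
+                    </literallayout>
+                </para>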
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_FILENAME'><glossterm>BB_FILENAME</glossterm>
+            <glossdef>
+                <para>
+                    Contains the filename of the recipe that owns the currently
+                    running task.
+                    For example, if the <filename>do_fetch</filename> task that
+                    resides in the <filename>my-recipe.bb</filename> file is
+                    executing, the <filename>BB_FILENAME</filename> variable
+                    contains "/foo/path/my-recipe.bb".
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_GENERATE_MIRROR_TARBALLS'><glossterm>BB_GENERATE_MIRROR_TARBALLS</glossterm>
+            <glossdef>
+                <para>
+                    Causes tarballs of the Git repositories, including the
+                    Git metadata, to be placed in the
+                    <link linkend='var-DL_DIR'><filename>DL_DIR</filename></link>
+                    directory.
+                    Anyone wishing to create a source mirror would want to
+                    enable this variable.
+                </para>
+
+                <para>
+                    For performance reasons, creating and placing tarballs of
+                    the Git repositories is not the default action of BitBake.
+                    To enable this behavior, set the variable as follows:
+                    <literallayout class='monospaced'>
+     BB_GENERATE_MIRROR_TARBALLS = "1"
+                    </literallayout>
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_HASHCONFIG_WHITELIST'><glossterm>BB_HASHCONFIG_WHITELIST</glossterm>
+            <glossdef>
+                <para>
+                    Lists variables that are excluded from the base
+                    configuration checksum, which is used to determine whether
+                    the cache can be reused.
+                </para>
+
+                <para>
+                    One of the ways BitBake determines whether to re-parse the
+                    main metadata is through checksums of the variables in the
+                    datastore of the base configuration data.
+                    There are variables that you typically want to exclude when
+                    checking whether or not to re-parse and thus rebuild the
+                    cache.
+                    As an example, you would usually exclude
+                    <filename>TIME</filename> and <filename>DATE</filename>
+                    because these variables are always changing.
+                    If you did not exclude them, BitBake would never reuse the
+                    cache.
+                </para>
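+
+                <para>
+                    For example, to exclude the always-changing
+                    <filename>TIME</filename> and <filename>DATE</filename>
+                    variables mentioned above, an assignment such as the
+                    following could be used:
+                    <literallayout class='monospaced'>
+     BB_HASHCONFIG_WHITELIST += "TIME DATE"
+                    </literallayout>
+                </para>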
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_HASHBASE_WHITELIST'><glossterm>BB_HASHBASE_WHITELIST</glossterm>
+            <glossdef>
+                <para>
+                    Lists variables that are excluded from checksum and
+                    dependency data.
+                    Variables that are excluded can therefore change without
+                    affecting the checksum mechanism.
+                    A common example would be the variable for the path of
+                    the build.
+                    BitBake's output should not (and usually does not) depend
+                    on the directory in which it was built.
+                </para>
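+
+                <para>
+                    As a sketch, assuming the build path is held in a
+                    variable named <filename>TMPDIR</filename> (as it is in
+                    the OpenEmbedded metadata), you could exclude it as
+                    follows:
+                    <literallayout class='monospaced'>
+     # "TMPDIR" is assumed here for illustration
+     BB_HASHBASE_WHITELIST += "TMPDIR"
+                    </literallayout>
+                </para>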
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_HASHCHECK_FUNCTION'><glossterm>BB_HASHCHECK_FUNCTION</glossterm>
+            <glossdef>
+                <para>
+                    Specifies the name of the function to call during the
+                    "setscene" part of the task's execution in order to
+                    validate the list of task hashes.
+                    The function returns the list of setscene tasks that should
+                    be executed.
+                </para>
+
+                <para>
+                    At this point in the execution of the code, the objective
+                    is to quickly verify if a given setscene function is likely
+                    to work or not.
+                    It's easier to check the list of setscene functions in
+                    one pass than to call many individual tasks.
+                    The returned list need not be completely accurate.
+                    A given setscene task can still later fail.
+                    However, the more accurate the data returned, the more
+                    efficient the build will be.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_INVALIDCONF'><glossterm>BB_INVALIDCONF</glossterm>
+            <glossdef>
+                <para>
+                    Used in combination with the
+                    <filename>ConfigParsed</filename> event to trigger
+                    re-parsing the base metadata (i.e. all the
+                    recipes).
+                    The <filename>ConfigParsed</filename> event can set the
+                    variable to trigger the re-parse.
+                    You must be careful to avoid recursive loops with this
+                    functionality.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_LOGFMT'><glossterm>BB_LOGFMT</glossterm>
+            <glossdef>
+                <para>
+                    Specifies the name of the log files saved into
+                    <filename>${</filename><link linkend='var-T'><filename>T</filename></link><filename>}</filename>.
+                    By default, the <filename>BB_LOGFMT</filename> variable
+                    is undefined and the log file names get created using the
+                    following form:
+                    <literallayout class='monospaced'>
+     log.{task}.{pid}
+                    </literallayout>
+                    If you want to force log files to take a specific name,
+                    you can set this variable in a configuration file.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_NICE_LEVEL'><glossterm>BB_NICE_LEVEL</glossterm>
+            <glossdef>
+                <para>
+                    Allows BitBake to run at a specific priority
+                    (i.e. nice level).
+                    System permissions usually mean that BitBake can reduce its
+                    priority but not raise it again.
+                    See
+                    <link linkend='var-BB_TASK_NICE_LEVEL'><filename>BB_TASK_NICE_LEVEL</filename></link>
+                    for additional information.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_NO_NETWORK'><glossterm>BB_NO_NETWORK</glossterm>
+            <glossdef>
+                <para>
+                    Disables network access in the BitBake fetcher modules.
+                    With this access disabled, any command that attempts to
+                    access the network results in an error.
+                </para>
+
+                <para>
+                    Disabling network access is useful for testing source
+                    mirrors, running builds when not connected to the Internet,
+                    and when operating in certain kinds of firewall
+                    environments.
+                </para>
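+
+                <para>
+                    To disable network access, set the variable to "1",
+                    for example in a configuration file:
+                    <literallayout class='monospaced'>
+     BB_NO_NETWORK = "1"
+                    </literallayout>
+                </para>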
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_NUMBER_THREADS'><glossterm>BB_NUMBER_THREADS</glossterm>
+            <glossdef>
+                <para>
+                    The maximum number of tasks BitBake should run in parallel
+                    at any one time.
+                    If your host development system supports multiple cores,
+                    a good rule of thumb is to set this variable to twice the
+                    number of cores.
+                </para>
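+
+                <para>
+                    For example, on a host with four cores, the rule of
+                    thumb above suggests the following setting:
+                    <literallayout class='monospaced'>
+     BB_NUMBER_THREADS = "8"
+                    </literallayout>
+                </para>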
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_NUMBER_PARSE_THREADS'><glossterm>BB_NUMBER_PARSE_THREADS</glossterm>
+            <glossdef>
+                <para>
+                    Sets the number of threads BitBake uses when parsing.
+                    By default, the number of threads is equal to the number
+                    of cores on the system.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_ORIGENV'><glossterm>BB_ORIGENV</glossterm>
+            <glossdef>
+                <para>
+                    Contains a copy of the original external environment in
+                    which BitBake was run.
+                    The copy is taken before any whitelisted variable values
+                    are filtered into BitBake's datastore.
+                    <note>
+                        The contents of this variable are a datastore object
+                        that can be queried using the normal datastore
+                        operations.
+                    </note>
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_PRESERVE_ENV'><glossterm>BB_PRESERVE_ENV</glossterm>
+            <glossdef>
+                <para>
+                    Disables whitelisting and instead allows all variables
+                    through from the external environment into BitBake's
+                    datastore.
+                    <note>
+                        You must set this variable in the external
+                        environment in order for it to work.
+                    </note>
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_RUNFMT'><glossterm>BB_RUNFMT</glossterm>
+            <glossdef>
+                <para>
+                    Specifies the name of the executable script files
+                    (i.e. run files) saved into
+                    <filename>${</filename><link linkend='var-T'><filename>T</filename></link><filename>}</filename>.
+                    By default, the <filename>BB_RUNFMT</filename> variable
+                    is undefined and the run file names get created using the
+                    following form:
+                    <literallayout class='monospaced'>
+     run.{task}.{pid}
+                    </literallayout>
+                    If you want to force run files to take a specific name,
+                    you can set this variable in a configuration file.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_RUNTASK'><glossterm>BB_RUNTASK</glossterm>
+            <glossdef>
+                <para>
+                    Contains the name of the currently executing task.
+                    The value does not include the "do_" prefix.
+                    For example, if the currently executing task is
+                    <filename>do_config</filename>, the value is
+                    "config".
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_SCHEDULER'><glossterm>BB_SCHEDULER</glossterm>
+            <glossdef>
+                <para>
+                    Selects the name of the scheduler to use for the
+                    scheduling of BitBake tasks.
+                    Three options exist:
+                    <itemizedlist>
+                        <listitem><para><emphasis>basic</emphasis> -
+                            The basic framework from which everything derives.
+                            Using this option causes tasks to be ordered
+                            numerically as they are parsed.
+                            </para></listitem>
+                        <listitem><para><emphasis>speed</emphasis> -
+                            Executes first the tasks that have the most other
+                            tasks depending on them.
+                            The "speed" option is the default.
+                            </para></listitem>
+                        <listitem><para><emphasis>completion</emphasis> -
+                            Causes the scheduler to try to complete a given
+                            recipe once its build has started.
+                            </para></listitem>
+                    </itemizedlist>
+                </para>
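+
+                <para>
+                    For example, to override the default and favor finishing
+                    recipes whose builds have already started, you could
+                    select the "completion" scheduler:
+                    <literallayout class='monospaced'>
+     BB_SCHEDULER = "completion"
+                    </literallayout>
+                </para>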
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_SCHEDULERS'><glossterm>BB_SCHEDULERS</glossterm>
+            <glossdef>
+                <para>
+                    Defines custom schedulers to import.
+                    Custom schedulers need to be derived from the
+                    <filename>RunQueueScheduler</filename> class.
+                </para>
+
+                <para>
+                    For information on how to select a scheduler, see the
+                    <link linkend='var-BB_SCHEDULER'><filename>BB_SCHEDULER</filename></link>
+                    variable.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_SETSCENE_DEPVALID'><glossterm>BB_SETSCENE_DEPVALID</glossterm>
+            <glossdef>
+                <para>
+                    Specifies a function BitBake calls that determines
+                    whether BitBake requires a setscene dependency to be met.
+                </para>
+
+                <para>
+                    When running a setscene task, BitBake needs to
+                    know which dependencies of that setscene task also need
+                    to be run.
+                    Whether dependencies also need to be run is highly
+                    dependent on the metadata.
+                    The function specified by this variable returns
+                    "True" or "False" depending on whether the dependency needs
+                    to be met.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_SETSCENE_VERIFY_FUNCTION'><glossterm>BB_SETSCENE_VERIFY_FUNCTION</glossterm>
+            <glossdef>
+                <para>
+                    Specifies a function to call that verifies the list of
+                    tasks planned for execution before the main task execution
+                    happens.
+                    The function is called once BitBake has a list of setscene
+                    tasks that have run and either succeeded or failed.
+                </para>
+
+                <para>
+                    The function allows BitBake to check whether the planned
+                    task list makes sense.
+                    Even if BitBake was planning to skip a task, the
+                    returned value of the function can force BitBake to run
+                    the task, which is necessary under certain metadata
+                    defined circumstances.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_SIGNATURE_EXCLUDE_FLAGS'><glossterm>BB_SIGNATURE_EXCLUDE_FLAGS</glossterm>
+            <glossdef>
+                <para>
+                    Lists variable flags (varflags)
+                    that can be safely excluded from checksum
+                    and dependency data for keys in the datastore.
+                    When generating checksum or dependency data for keys in the
+                    datastore, the flags set against that key are normally
+                    included in the checksum.
+                </para>
+
+                <para>
+                    For more information on varflags, see the
+                    "<link linkend='variable-flags'>Variable Flags</link>"
+                    section.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_SIGNATURE_HANDLER'><glossterm>BB_SIGNATURE_HANDLER</glossterm>
+            <glossdef>
+                <para>
+                    Defines the name of the signature handler BitBake uses.
+                    The signature handler defines the way stamp files are
+                    created and handled, if and how the signature is
+                    incorporated into the stamps, and how the signature
+                    itself is generated.
+                </para>
+
+                <para>
+                    A new signature handler can be added by injecting a class
+                    derived from the
+                    <filename>SignatureGenerator</filename> class into the
+                    global namespace.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_SRCREV_POLICY'><glossterm>BB_SRCREV_POLICY</glossterm>
+            <glossdef>
+                <para>
+                    Defines the behavior of the fetcher when it interacts with
+                    source control systems and dynamic source revisions.
+                    The <filename>BB_SRCREV_POLICY</filename> variable is
+                    useful when working without a network.
+                </para>
+
+                <para>
+                    The variable can be set using one of two policies:
+                    <itemizedlist>
+                        <listitem><para><emphasis>cache</emphasis> -
+                            Retains the value the system obtained previously
+                            rather than querying the source control system
+                            each time.
+                            </para></listitem>
+                        <listitem><para><emphasis>clear</emphasis> -
+                            Queries the source control system every time.
+                            With this policy, there is no cache.
+                            The "clear" policy is the default.
+                            </para></listitem>
+                    </itemizedlist>
+                </para>
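+
+                <para>
+                    For example, when working offline you might switch from
+                    the default "clear" policy to the cached behavior:
+                    <literallayout class='monospaced'>
+     BB_SRCREV_POLICY = "cache"
+                    </literallayout>
+                </para>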
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_STAMP_POLICY'><glossterm>BB_STAMP_POLICY</glossterm>
+            <glossdef>
+                <para>
+                    Defines the mode used for how timestamps of stamp files
+                    are compared.
+                    You can set the variable to one of the following modes:
+                    <itemizedlist>
+                        <listitem><para><emphasis>perfile</emphasis> -
+                            Timestamp comparisons are only made
+                            between timestamps of a specific recipe.
+                            This is the default mode.
+                            </para></listitem>
+                        <listitem><para><emphasis>full</emphasis> -
+                            Timestamp comparisons are made for all
+                            dependencies.
+                            </para></listitem>
+                        <listitem><para><emphasis>whitelist</emphasis> -
+                            Identical to "full" mode except timestamp
+                            comparisons are made for recipes listed in the
+                            <link linkend='var-BB_STAMP_WHITELIST'><filename>BB_STAMP_WHITELIST</filename></link>
+                            variable.
+                            </para></listitem>
+                    </itemizedlist>
+                    <note>
+                        Stamp policies are largely obsolete with the
+                        introduction of setscene tasks.
+                    </note>
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_STAMP_WHITELIST'><glossterm>BB_STAMP_WHITELIST</glossterm>
+            <glossdef>
+                <para>
+                    Lists files whose stamp file timestamps are compared when
+                    the stamp policy mode is set to "whitelist".
+                    For information on stamp policies, see the
+                    <link linkend='var-BB_STAMP_POLICY'><filename>BB_STAMP_POLICY</filename></link>
+                    variable.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_STRICT_CHECKSUM'><glossterm>BB_STRICT_CHECKSUM</glossterm>
+            <glossdef>
+                <para>
+                    Sets a more strict checksum mechanism for non-local URLs.
+                    Setting this variable to a value causes BitBake
+                    to report an error if it encounters a non-local URL
+                    that does not have at least one checksum specified.
+                </para>
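+
+                <para>
+                    For example, the following setting enables the stricter
+                    behavior:
+                    <literallayout class='monospaced'>
+     BB_STRICT_CHECKSUM = "1"
+                    </literallayout>
+                </para>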
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_TASK_IONICE_LEVEL'><glossterm>BB_TASK_IONICE_LEVEL</glossterm>
+            <glossdef>
+                <para>
+                    Allows adjustment of a task's Input/Output priority.
+                    During Autobuilder testing, random failures can occur
+                    for tasks due to I/O starvation.
+                    These failures occur during various QEMU runtime timeouts.
+                    You can use the <filename>BB_TASK_IONICE_LEVEL</filename>
+                    variable to adjust the I/O priority of these tasks.
+                    <note>
+                        This variable works similarly to the
+                        <link linkend='var-BB_TASK_NICE_LEVEL'><filename>BB_TASK_NICE_LEVEL</filename></link>
+                        variable except with a task's I/O priorities.
+                    </note>
+                </para>
+
+                <para>
+                    Set the variable as follows:
+                    <literallayout class='monospaced'>
+     BB_TASK_IONICE_LEVEL = "<replaceable>class</replaceable>.<replaceable>prio</replaceable>"
+                    </literallayout>
+                    For <replaceable>class</replaceable>, the default value is
+                    "2", which is the best-effort class.
+                    You can use "1" for realtime and "3" for idle.
+                    If you want to use realtime, you must have superuser
+                    privileges.
+                </para>
+
+                <para>
+                    For <replaceable>prio</replaceable>, you can use any
+                    value from "0", which is the highest priority, to "7",
+                    which is the lowest.
+                    The default value is "4".
+                    You do not need any special privileges to use this range
+                    of priority values.
+                    <note>
+                        In order for your I/O priority settings to take effect,
+                        you need the Completely Fair Queuing (CFQ) Scheduler
+                        selected for the backing block device.
+                        To select the scheduler, use the following command form
+                        where <replaceable>device</replaceable> is the device
+                        (e.g. sda, sdb, and so forth):
+                        <literallayout class='monospaced'>
+      $ sudo sh -c "echo cfq > /sys/block/<replaceable>device</replaceable>/queue/scheduler"
+                        </literallayout>
+                    </note>
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_TASK_NICE_LEVEL'><glossterm>BB_TASK_NICE_LEVEL</glossterm>
+            <glossdef>
+                <para>
+                    Allows specific tasks to change their priority
+                    (i.e. nice level).
+                </para>
+
+                <para>
+                    You can use this variable in combination with task
+                    overrides to raise or lower priorities of specific tasks.
+                    For example, on the
+                    <ulink url='http://www.yoctoproject.org'>Yocto Project</ulink>
+                    autobuilder, QEMU emulation in images is given a higher
+                    priority as compared to build tasks to ensure that images
+                    do not suffer timeouts on loaded systems.
+                </para>
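+
+                <para>
+                    A minimal sketch, assuming a hypothetical task named
+                    <filename>do_testimage</filename> that you want to run at
+                    a higher priority (lower nice value) than other tasks:
+                    <literallayout class='monospaced'>
+     # "do_testimage" is a hypothetical task name used for illustration
+     BB_TASK_NICE_LEVEL = "5"
+     BB_TASK_NICE_LEVEL_task-testimage = "0"
+                    </literallayout>
+                </para>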
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_TASKHASH'><glossterm>BB_TASKHASH</glossterm>
+            <glossdef>
+                <para>
+                    Within an executing task, this variable holds the hash
+                    of the task as returned by the currently enabled
+                    signature generator.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_VERBOSE_LOGS'><glossterm>BB_VERBOSE_LOGS</glossterm>
+            <glossdef>
+                <para>
+                    Controls how verbose BitBake is during builds.
+                    If set, shell scripts echo commands and shell script output
+                    appears on standard out (stdout).
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BB_WORKERCONTEXT'><glossterm>BB_WORKERCONTEXT</glossterm>
+            <glossdef>
+                <para>
+                    Specifies if the current context is executing a task.
+                    BitBake sets this variable to "1" when a task is
+                    being executed.
+                    The value is not set when the task is in server context
+                    during parsing or event handling.
+                </para>
+            </glossdef>
+        </glossentry>
+
+
+        <glossentry id='var-BBCLASSEXTEND'><glossterm>BBCLASSEXTEND</glossterm>
+            <glossdef>
+                <para>
+                    Allows you to extend a recipe so that it builds variants
+                    of the software.
+                    Some examples of these variants for recipes from the
+                    OpenEmbedded Core metadata are "natives" such as
+                    <filename>quilt-native</filename>, which is a copy of
+                    Quilt built to run on the build system; "crosses" such
+                    as <filename>gcc-cross</filename>, which is a compiler
+                    built to run on the build machine but produces binaries
+                    that run on the target <filename>MACHINE</filename>;
+                    "nativesdk", which targets the SDK machine instead of
+                    <filename>MACHINE</filename>; and "multilibs" in the form
+                    "<filename>multilib:</filename><replaceable>multilib_name</replaceable>".
+                </para>
+
+                <para>
+                    Building a different variant of the recipe usually is as
+                    simple as adding the variable to your recipe.
+                    Here are two examples.
+                    The "native" variants are from the OpenEmbedded Core
+                    metadata:
+                    <literallayout class='monospaced'>
+     BBCLASSEXTEND =+ "native nativesdk"
+     BBCLASSEXTEND =+ "multilib:<replaceable>multilib_name</replaceable>"
+                    </literallayout>
+                </para>
+             </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BBDEBUG'><glossterm>BBDEBUG</glossterm>
+            <glossdef>
+                <para>
+                    Sets the BitBake debug output level to a specific value
+                    as incremented by the <filename>-d</filename> command line
+                    option.
+                    <note>
+                        You must set this variable in the external environment
+                        in order for it to work.
+                    </note>
+                </para>
+             </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BBFILE_COLLECTIONS'><glossterm>BBFILE_COLLECTIONS</glossterm>
+            <glossdef>
+                <para>Lists the names of configured layers.
+                    These names are used to find the other <filename>BBFILE_*</filename>
+                    variables.
+                    Typically, each layer appends its name to this variable in its
+                    <filename>conf/layer.conf</filename> file.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BBFILE_PATTERN'><glossterm>BBFILE_PATTERN</glossterm>
+            <glossdef>
+                <para>Variable that expands to match files from
+                    <link linkend='var-BBFILES'><filename>BBFILES</filename></link>
+                    in a particular layer.
+                    This variable is used in the <filename>conf/layer.conf</filename> file and must
+                    be suffixed with the name of the specific layer (e.g.
+                    <filename>BBFILE_PATTERN_emenlow</filename>).</para>
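+
+                <para>
+                    A minimal sketch of the relevant lines from a
+                    hypothetical layer's <filename>conf/layer.conf</filename>
+                    file (the layer name "mylayer" is an example only):
+                    <literallayout class='monospaced'>
+     # "mylayer" is a placeholder layer name
+     BBFILES += "${LAYERDIR}/recipes-*/*/*.bb"
+     BBFILE_COLLECTIONS += "mylayer"
+     BBFILE_PATTERN_mylayer = "^${LAYERDIR}/"
+     BBFILE_PRIORITY_mylayer = "5"
+                    </literallayout>
+                </para>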
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BBFILE_PRIORITY'><glossterm>BBFILE_PRIORITY</glossterm>
+            <glossdef>
+                <para>Assigns the priority for recipe files in each layer.</para>
+                <para>This variable is useful in situations where the same recipe appears in
+                    more than one layer.
+                    Setting this variable allows you to prioritize a
+                    layer against other layers that contain the same recipe - effectively
+                    letting you control the precedence for the multiple layers.
+                    The precedence established through this variable stands regardless of a
+                    recipe's version
+                    (<link linkend='var-PV'><filename>PV</filename></link> variable).
+                    For example, a layer that has a recipe with a higher <filename>PV</filename> value but for
+                    which the <filename>BBFILE_PRIORITY</filename> is set to have a lower precedence still has a
+                    lower precedence.</para>
+                <para>A larger value for the <filename>BBFILE_PRIORITY</filename> variable results in a higher
+                    precedence.
+                    For example, the value 6 has a higher precedence than the value 5.
+                    If not specified, the <filename>BBFILE_PRIORITY</filename> variable is set based on layer
+                    dependencies (see the
+                    <filename><link linkend='var-LAYERDEPENDS'>LAYERDEPENDS</link></filename> variable for
+                    more information).
+                    The default priority, if unspecified
+                    for a layer with no dependencies, is the lowest defined priority + 1
+                    (or 1 if no priorities are defined).</para>
+                <tip>
+                    You can use the command <filename>bitbake-layers show-layers</filename> to list
+                    all configured layers along with their priorities.
+                </tip>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BBFILES'><glossterm>BBFILES</glossterm>
+            <glossdef>
+                <para>List of recipe files BitBake uses to build software.</para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BBINCLUDED'><glossterm>BBINCLUDED</glossterm>
+            <glossdef>
+                <para>
+                    Contains a space-separated list of all files that
+                    BitBake's parser included during parsing of the current
+                    file.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BBINCLUDELOGS'><glossterm>BBINCLUDELOGS</glossterm>
+            <glossdef>
+                <para>
+                    If set to a value, enables printing the task log when
+                    reporting a failed task.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BBINCLUDELOGS_LINES'><glossterm>BBINCLUDELOGS_LINES</glossterm>
+            <glossdef>
+                <para>
+                    If
+                    <link linkend='var-BBINCLUDELOGS'><filename>BBINCLUDELOGS</filename></link>
+                    is set, specifies the maximum number of lines from the
+                    task log file to print when reporting a failed task.
+                    If you do not set <filename>BBINCLUDELOGS_LINES</filename>,
+                    the entire log is printed.
+                </para>
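+
+                <para>
+                    For example, the following illustrative settings print
+                    the last 20 lines of a failing task's log:
+                    <literallayout class='monospaced'>
+     BBINCLUDELOGS = "yes"
+     BBINCLUDELOGS_LINES = "20"
+                    </literallayout>
+                </para>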
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BBLAYERS'><glossterm>BBLAYERS</glossterm>
+            <glossdef>
+                <para>Lists the layers to enable during the build.
+                    This variable is defined in the <filename>bblayers.conf</filename> configuration
+                    file in the build directory.
+                    Here is an example:
+                    <literallayout class='monospaced'>
+     BBLAYERS = " \
+       /home/scottrif/poky/meta \
+       /home/scottrif/poky/meta-yocto \
+       /home/scottrif/poky/meta-yocto-bsp \
+       /home/scottrif/poky/meta-mykernel \
+       "
+                    </literallayout>
+                    This example enables four layers, one of which is a custom, user-defined layer
+                    named <filename>meta-mykernel</filename>.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BBLAYERS_FETCH_DIR'><glossterm>BBLAYERS_FETCH_DIR</glossterm>
+            <glossdef>
+                <para>
+                    Sets the base location where layers are stored.
+                    By default, this location is set to
+                    <filename>${COREBASE}</filename>.
+                    This setting is used in conjunction with
+                    <filename>bitbake-layers layerindex-fetch</filename> and
+                    tells <filename>bitbake-layers</filename> where to place
+                    the fetched layers.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BBMASK'><glossterm>BBMASK</glossterm>
+            <glossdef>
+                <para>
+                    Prevents BitBake from processing recipes and recipe
+                    append files.
+                </para>
+
+                <para>
+                    You can use the <filename>BBMASK</filename> variable
+                    to "hide" these <filename>.bb</filename> and
+                    <filename>.bbappend</filename> files.
+                    BitBake ignores any recipe or recipe append files that
+                    match any of the expressions you provide.
+                    It is as if BitBake does not see them at all.
+                    Consequently, matching files are not parsed or otherwise
+                    used by BitBake.</para>
+                <para>
+                    The values you provide are passed to Python's regular
+                    expression compiler.
+                    The expressions are compared against the full paths to
+                    the files.
+                    For complete syntax information, see Python's
+                    documentation at
+                    <ulink url='http://docs.python.org/release/2.3/lib/re-syntax.html'></ulink>.
+                </para>
+
+                <para>
+                    The following example uses a complete regular expression
+                    to tell BitBake to ignore all recipe and recipe append
+                    files in the <filename>meta-ti/recipes-misc/</filename>
+                    directory:
+                    <literallayout class='monospaced'>
+     BBMASK = "meta-ti/recipes-misc/"
+                    </literallayout>
+                    If you want to mask out multiple directories or recipes,
+                    you can specify multiple regular expression fragments.
+                    This next example masks out multiple directories and
+                    individual recipes:
+                    <literallayout class='monospaced'>
+     BBMASK += "/meta-ti/recipes-misc/ meta-ti/recipes-ti/packagegroup/"
+     BBMASK += "/meta-oe/recipes-support/"
+     BBMASK += "/meta-foo/.*/openldap"
+     BBMASK += "opencv.*\.bbappend"
+     BBMASK += "lzma"
+                    </literallayout>
+                    <note>
+                        When specifying a directory name, use the trailing
+                        slash character to ensure you match just that directory
+                        name.
+                    </note>
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BBPATH'><glossterm>BBPATH</glossterm>
+            <glossdef>
+                <para>
+                    Used by BitBake to locate class
+                    (<filename>.bbclass</filename>) and configuration
+                    (<filename>.conf</filename>) files.
+                    This variable is analogous to the
+                    <filename>PATH</filename> variable.
+                </para>
+
+                <para>
+                    If you run BitBake from a directory outside of the
+                    build directory,
+                    you must be sure to set
+                    <filename>BBPATH</filename> to point to the
+                    build directory.
+                    Set the variable as you would any environment variable
+                    and then run BitBake:
+                    <literallayout class='monospaced'>
+     $ BBPATH="<replaceable>build_directory</replaceable>"
+     $ export BBPATH
+     $ bitbake <replaceable>target</replaceable>
+                    </literallayout>
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BBSERVER'><glossterm>BBSERVER</glossterm>
+            <glossdef>
+                <para>
+                    Points to the server that runs memory-resident BitBake.
+                    The variable is only used when you employ memory-resident
+                    BitBake.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BBTARGETS'><glossterm>BBTARGETS</glossterm>
+            <glossdef>
+                <para>
+                    Allows you to use a configuration file to add to the list
+                    of command-line target recipes you want to build.
+                </para>
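+
+                <para>
+                    A sketch using hypothetical recipe names:
+                    <literallayout class='monospaced'>
+     # "recipe-a" and "recipe-b" are placeholder recipe names
+     BBTARGETS = "recipe-a recipe-b"
+                    </literallayout>
+                    Running <filename>bitbake</filename> then builds these
+                    recipes in addition to any targets given on the command
+                    line.
+                </para>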
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BBVERSIONS'><glossterm>BBVERSIONS</glossterm>
+            <glossdef>
+                <para>
+                    Allows a single recipe file to build multiple versions
+                    of a project.
+                    You can also specify conditional metadata
+                    using the
+                    <link linkend='var-OVERRIDES'><filename>OVERRIDES</filename></link>
+                    mechanism for a single version or for an optionally named
+                    range of versions.
+                </para>
+
+                <para>
+                    For more information on <filename>BBVERSIONS</filename>,
+                    see the
+                    "<link linkend='variants-class-extension-mechanism'>Variants - Class Extension Mechanism</link>"
+                    section.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BITBAKE_UI'><glossterm>BITBAKE_UI</glossterm>
+            <glossdef>
+                <para>
+                    Used to specify the UI module to use when running BitBake.
+                    Using this variable is equivalent to using the
+                    <filename>-u</filename> command-line option.
+                    <note>
+                        You must set this variable in the external environment
+                        in order for it to work.
+                    </note>
+                </para>
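+
+                <para>
+                    For example, to explicitly select the default "knotty"
+                    console UI:
+                    <literallayout class='monospaced'>
+     $ export BITBAKE_UI="knotty"
+     $ bitbake <replaceable>target</replaceable>
+                    </literallayout>
+                </para>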
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BUILDNAME'><glossterm>BUILDNAME</glossterm>
+            <glossdef>
+                <para>
+                    A name assigned to the build.
+                    The name defaults to a datetime stamp of when the build was
+                    started but can be defined by the metadata.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-BZRDIR'><glossterm>BZRDIR</glossterm>
+            <glossdef>
+                <para>
+                    The directory in which files checked out of a Bazaar
+                    system are stored.
+                </para>
+            </glossdef>
+        </glossentry>
+
+    </glossdiv>
+
+    <glossdiv id='var-glossary-c'><title>C</title>
+
+        <glossentry id='var-CACHE'><glossterm>CACHE</glossterm>
+            <glossdef>
+                <para>
+                    Specifies the directory BitBake uses to store a cache
+                    of the metadata so it does not need to be parsed every
+                    time BitBake is started.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-CVSDIR'><glossterm>CVSDIR</glossterm>
+            <glossdef>
+                <para>
+                    The directory in which files checked out under the
+                    CVS system are stored.
+                </para>
+            </glossdef>
+        </glossentry>
+
+    </glossdiv>
+
+    <glossdiv id='var-glossary-d'><title>D</title>
+
+        <glossentry id='var-DEFAULT_PREFERENCE'><glossterm>DEFAULT_PREFERENCE</glossterm>
+            <glossdef>
+                <para>
+                    Specifies a weak bias for recipe selection priority.
+                </para>
+                <para>
+                    The most common usage of this variable is to set
+                    it to "-1" within a recipe for a development version of a
+                    piece of software.
+                    Using the variable in this way causes the stable version
+                    of the recipe to build by default in the absence of
+                    <filename><link linkend='var-PREFERRED_VERSION'>PREFERRED_VERSION</link></filename>
+                    being used to build the development version.
+                </para>
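+                <para>
+                    For example, a development recipe might contain the
+                    following line so that the stable recipe is preferred
+                    by default:
+                    <literallayout class='monospaced'>
+     DEFAULT_PREFERENCE = "-1"
+                    </literallayout>
+                </para>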
+                <note>
+                    The bias provided by <filename>DEFAULT_PREFERENCE</filename>
+                    is weak and is overridden by
+                    <filename><link linkend='var-BBFILE_PRIORITY'>BBFILE_PRIORITY</link></filename>
+                    if that variable is different between two layers
+                    that contain different versions of the same recipe.
+                </note>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-DEPENDS'><glossterm>DEPENDS</glossterm>
+            <glossdef>
+                <para>
+                    Lists a recipe's build-time dependencies
+                    (i.e. other recipe files).
+                </para>
+
+                <para>
+                    Consider this simple example for two recipes named "a" and
+                    "b" that produce similarly named packages.
+                    In this example, the <filename>DEPENDS</filename>
+                    statement appears in the "a" recipe:
+                    <literallayout class='monospaced'>
+     DEPENDS = "b"
+                    </literallayout>
+                    Here, the dependency is such that the
+                    <filename>do_configure</filename> task for recipe "a"
+                    depends on the <filename>do_populate_sysroot</filename>
+                    task of recipe "b".
+                    This means anything that recipe "b" puts into sysroot
+                    is available when recipe "a" is configuring itself.
+                </para>
+
+                <para>
+                    For information on runtime dependencies, see the
+                    <link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link>
+                    variable.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-DESCRIPTION'><glossterm>DESCRIPTION</glossterm>
+            <glossdef>
+                <para>
+                    A long description for the recipe.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-DL_DIR'><glossterm>DL_DIR</glossterm>
+            <glossdef>
+                <para>
+                    The central download directory used by the build process to
+                    store downloads.
+                    By default, <filename>DL_DIR</filename> gets files
+                    suitable for mirroring for everything except Git
+                    repositories.
+                    If you want tarballs of Git repositories, use the
+                    <link linkend='var-BB_GENERATE_MIRROR_TARBALLS'><filename>BB_GENERATE_MIRROR_TARBALLS</filename></link>
+                    variable.
+                </para>
+            </glossdef>
+
+        </glossentry>
+    </glossdiv>
+
+    <glossdiv id='var-glossary-e'><title>E</title>
+
+        <glossentry id='var-EXCLUDE_FROM_WORLD'><glossterm>EXCLUDE_FROM_WORLD</glossterm>
+            <glossdef>
+                <para>
+                    Directs BitBake to exclude a recipe from world builds (i.e.
+                    <filename>bitbake world</filename>).
+                    During world builds, BitBake locates, parses and builds all
+                    recipes found in every layer exposed in the
+                    <filename>bblayers.conf</filename> configuration file.
+                </para>
+
+                <para>
+                    To exclude a recipe from a world build using this variable,
+                    set the variable to "1" in the recipe.
+                </para>
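+
+                <para>
+                    For example, within the recipe you want excluded:
+                    <literallayout class='monospaced'>
+     EXCLUDE_FROM_WORLD = "1"
+                    </literallayout>
+                </para>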
+
+                <note>
+                    Recipes added to <filename>EXCLUDE_FROM_WORLD</filename>
+                    may still be built during a world build in order to satisfy
+                    dependencies of other recipes.
+                    Adding a recipe to <filename>EXCLUDE_FROM_WORLD</filename>
+                    only ensures that the recipe is not explicitly added
+                    to the list of build targets in a world build.
+                </note>
+            </glossdef>
+        </glossentry>
+
+    </glossdiv>
+
+    <glossdiv id='var-glossary-f'><title>F</title>
+
+        <glossentry id='var-FAKEROOT'><glossterm>FAKEROOT</glossterm>
+            <glossdef>
+                <para>
+                     Contains the command to use when running a shell script
+                     in a fakeroot environment.
+                     The <filename>FAKEROOT</filename> variable is obsolete
+                     and has been replaced by the other
+                     <filename>FAKEROOT*</filename> variables.
+                     See these entries in the glossary for more information.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-FAKEROOTBASEENV'><glossterm>FAKEROOTBASEENV</glossterm>
+            <glossdef>
+                <para>
+                     Lists environment variables to set when executing
+                     the command defined by
+                     <link linkend='var-FAKEROOTCMD'><filename>FAKEROOTCMD</filename></link>
+                     that starts the bitbake-worker process
+                     in the fakeroot environment.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-FAKEROOTCMD'><glossterm>FAKEROOTCMD</glossterm>
+            <glossdef>
+                <para>
+                     Contains the command that starts the bitbake-worker
+                     process in the fakeroot environment.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-FAKEROOTDIRS'><glossterm>FAKEROOTDIRS</glossterm>
+            <glossdef>
+                <para>
+                     Lists directories to create before running a task in
+                     the fakeroot environment.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-FAKEROOTENV'><glossterm>FAKEROOTENV</glossterm>
+            <glossdef>
+                <para>
+                     Lists environment variables to set when running a task
+                     in the fakeroot environment.
+                     For additional information on environment variables and
+                     the fakeroot environment, see the
+                     <link linkend='var-FAKEROOTBASEENV'><filename>FAKEROOTBASEENV</filename></link>
+                     variable.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-FAKEROOTNOENV'><glossterm>FAKEROOTNOENV</glossterm>
+            <glossdef>
+                <para>
+                     Lists environment variables to set when running a task
+                     that is not in the fakeroot environment.
+                     For additional information on environment variables and
+                     the fakeroot environment, see the
+                     <link linkend='var-FAKEROOTENV'><filename>FAKEROOTENV</filename></link>
+                     variable.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-FETCHCMD'><glossterm>FETCHCMD</glossterm>
+            <glossdef>
+                <para>
+                    Defines the command the BitBake fetcher module
+                    executes when running fetch operations.
+                    You need to use an override suffix when you use the
+                    variable (e.g. <filename>FETCHCMD_git</filename>
+                    or <filename>FETCHCMD_svn</filename>).
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-FILE'><glossterm>FILE</glossterm>
+            <glossdef>
+                <para>
+                    Points at the current file.
+                    BitBake sets this variable during the parsing process
+                    to identify the file being parsed.
+                    BitBake also sets this variable when a recipe is being
+                    executed to identify the recipe file.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-FILESDIR'><glossterm>FILESDIR</glossterm>
+            <glossdef>
+                <para>
+                    Specifies directories BitBake uses when searching for
+                    patches and files.
+                    The "local" fetcher module uses these directories when
+                    handling <filename>file://</filename> URLs if the file
+                    was not found using
+                    <link linkend='var-FILESPATH'><filename>FILESPATH</filename></link>.
+                    <note>
+                        The <filename>FILESDIR</filename> variable is
+                        deprecated and you should use
+                        <filename>FILESPATH</filename> in all new code.
+                    </note>
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-FILESPATH'><glossterm>FILESPATH</glossterm>
+            <glossdef>
+                <para>
+                    Specifies directories BitBake uses when searching for
+                    patches and files.
+                    The "local" fetcher module uses these directories when
+                    handling <filename>file://</filename> URLs.
+                    The variable behaves like a shell <filename>PATH</filename>
+                    environment variable.
+                    The value is a colon-separated list of directories that
+                    are searched left-to-right in order.
+                </para>
+            </glossdef>
+        </glossentry>
+
+    </glossdiv>
+
+
+    <glossdiv id='var-glossary-g'><title>G</title>
+
+        <glossentry id='var-GITDIR'><glossterm>GITDIR</glossterm>
+            <glossdef>
+                <para>
+                    The directory in which a local copy of a Git repository
+                    is stored when it is cloned.
+                </para>
+            </glossdef>
+        </glossentry>
+
+    </glossdiv>
+
+
+    <glossdiv id='var-glossary-h'><title>H</title>
+
+        <glossentry id='var-HGDIR'><glossterm>HGDIR</glossterm>
+            <glossdef>
+                <para>
+                    The directory in which files checked out of a Mercurial
+                    system are stored.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-HOMEPAGE'><glossterm>HOMEPAGE</glossterm>
+            <glossdef>
+                <para>Website where more information about the software the recipe is building
+                    can be found.</para>
+            </glossdef>
+        </glossentry>
+
+    </glossdiv>
+
+    <glossdiv id='var-glossary-i'><title>I</title>
+
+        <glossentry id='var-INHERIT'><glossterm>INHERIT</glossterm>
+            <glossdef>
+                <para>
+                    Causes the named class to be inherited at
+                    this point during parsing.
+                    The variable is only valid in configuration files.
+                </para>
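+
+                <para>
+                    A minimal sketch, assuming a hypothetical class file
+                    named <filename>myclass.bbclass</filename> exists in a
+                    <filename>classes</filename> directory found through
+                    <link linkend='var-BBPATH'><filename>BBPATH</filename></link>:
+                    <literallayout class='monospaced'>
+     # "myclass" refers to a hypothetical myclass.bbclass
+     INHERIT += "myclass"
+                    </literallayout>
+                </para>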
+            </glossdef>
+        </glossentry>
+
+    </glossdiv>
+
+<!--
+    <glossdiv id='var-glossary-j'><title>J</title>
+    </glossdiv>
+
+    <glossdiv id='var-glossary-k'><title>K</title>
+    </glossdiv>
+-->
+
+    <glossdiv id='var-glossary-l'><title>L</title>
+
+        <glossentry id='var-LAYERDEPENDS'><glossterm>LAYERDEPENDS</glossterm>
+            <glossdef>
+                <para>Lists the layers, separated by spaces, upon which this layer depends.
+                    Optionally, you can specify a specific layer version for a dependency
+                    by adding it to the end of the layer name with a colon (e.g. "anotherlayer:3"
+                    to be compared against
+                    <link linkend='var-LAYERVERSION'><filename>LAYERVERSION</filename></link><filename>_anotherlayer</filename>
+                    in this case).
+                    BitBake produces an error if any dependency is missing or
+                    the version numbers do not match exactly (if specified).</para>
+                <para>
+                    You use this variable in the <filename>conf/layer.conf</filename> file.
+                    You must also use the specific layer name as a suffix
+                    to the variable (e.g. <filename>LAYERDEPENDS_mylayer</filename>).</para>
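+                <para>
+                    For example, a hypothetical layer named "mylayer" could
+                    declare in its <filename>conf/layer.conf</filename> file a
+                    dependency on version 3 of "anotherlayer":
+                    <literallayout class='monospaced'>
+     # "mylayer" and "anotherlayer" are illustrative layer names
+     LAYERDEPENDS_mylayer = "anotherlayer:3"
+                    </literallayout>
+                </para>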
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-LAYERDIR'><glossterm>LAYERDIR</glossterm>
+            <glossdef>
+                <para>When used inside the <filename>layer.conf</filename> configuration
+                    file, this variable provides the path of the current layer.
+                    This variable is not available outside of <filename>layer.conf</filename>
+                    and references are expanded immediately when parsing of the file completes.</para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-LAYERVERSION'><glossterm>LAYERVERSION</glossterm>
+            <glossdef>
+                <para>Optionally specifies the version of a layer as a single number.
+                    You can use this variable within
+                    <link linkend='var-LAYERDEPENDS'><filename>LAYERDEPENDS</filename></link>
+                    for another layer in order to depend on a specific version
+                    of the layer.</para>
+                <para>
+                    You use this variable in the <filename>conf/layer.conf</filename> file.
+                    You must also use the specific layer name as a suffix
+                    to the variable (e.g. <filename>LAYERVERSION_mylayer</filename>).</para>
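+                <para>
+                    For example, the hypothetical layer "mylayer" could declare
+                    its version in its <filename>conf/layer.conf</filename>
+                    file as follows:
+                    <literallayout class='monospaced'>
+     # "mylayer" is an illustrative layer name
+     LAYERVERSION_mylayer = "3"
+                    </literallayout>
+                </para>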
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-LICENSE'><glossterm>LICENSE</glossterm>
+            <glossdef>
+                <para>
+                    The list of source licenses for the recipe.
+                </para>
+            </glossdef>
+        </glossentry>
+
+    </glossdiv>
+
+    <glossdiv id='var-glossary-m'><title>M</title>
+
+        <glossentry id='var-MIRRORS'><glossterm>MIRRORS</glossterm>
+            <glossdef>
+                <para>
+                    Specifies additional paths from which BitBake gets source code.
+                    When the build system searches for source code, it first
+                    tries the local download directory.
+                    If that location fails, the build system tries locations
+                    defined by
+                    <link linkend='var-PREMIRRORS'><filename>PREMIRRORS</filename></link>,
+                    the upstream source, and then locations specified by
+                    <filename>MIRRORS</filename> in that order.
+                </para>
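+                <para>
+                    As a sketch, a configuration file could add a fallback
+                    mirror for HTTP downloads (the mirror URL shown matches the
+                    <link linkend='var-PREMIRRORS'><filename>PREMIRRORS</filename></link>
+                    example and is purely illustrative here):
+                    <literallayout class='monospaced'>
+     # Fallback tried after PREMIRRORS and the upstream source fail
+     MIRRORS += "http://.*/.* http://www.yoctoproject.org/sources/ \n"
+                    </literallayout>
+                </para>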
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-MULTI_PROVIDER_WHITELIST'><glossterm>MULTI_PROVIDER_WHITELIST</glossterm>
+            <glossdef>
+                <para>
+                    Allows you to suppress BitBake warnings caused when
+                    building two separate recipes that provide the same
+                    output.
+                </para>
+
+                <para>
+                    BitBake normally issues a warning when building two
+                    different recipes where each provides the same output.
+                    This scenario is usually something the user does not
+                    want.
+                    However, cases do exist where it makes sense, particularly
+                    in the <filename>virtual/*</filename> namespace.
+                    You can use this variable to suppress BitBake's warnings.
+                </para>
+
+                <para>
+                    To use the variable, list provider names (e.g.
+                    recipe names, <filename>virtual/kernel</filename>,
+                    and so forth).
+                </para>
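+                <para>
+                    For example, to allow multiple recipes that provide
+                    <filename>virtual/kernel</filename> to be built without the
+                    warning, a configuration file could contain the following
+                    illustrative sketch:
+                    <literallayout class='monospaced'>
+     # Suppress the multiple-provider warning for virtual/kernel
+     MULTI_PROVIDER_WHITELIST += "virtual/kernel"
+                    </literallayout>
+                </para>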
+            </glossdef>
+        </glossentry>
+
+    </glossdiv>
+
+<!--
+    <glossdiv id='var-glossary-n'><title>N</title>
+    </glossdiv>
+-->
+
+    <glossdiv id='var-glossary-o'><title>O</title>
+
+        <glossentry id='var-OVERRIDES'><glossterm>OVERRIDES</glossterm>
+            <glossdef>
+                <para>
+                    BitBake uses <filename>OVERRIDES</filename> to control
+                    what variables are overridden after BitBake parses
+                    recipes and configuration files.
+                </para>
+
+                <para>
+                    Following is a simple example that uses an overrides
+                    list based on machine architectures:
+                    <literallayout class='monospaced'>
+     OVERRIDES = "arm:x86:mips:powerpc"
+                    </literallayout>
+                    You can find information on how to use
+                    <filename>OVERRIDES</filename> in the
+                    "<link linkend='conditional-syntax-overrides'>Conditional Syntax (Overrides)</link>"
+                    section.
+                </para>
+            </glossdef>
+        </glossentry>
+    </glossdiv>
+
+    <glossdiv id='var-glossary-p'><title>P</title>
+
+        <glossentry id='var-PACKAGES'><glossterm>PACKAGES</glossterm>
+            <glossdef>
+                <para>The list of packages the recipe creates.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-PACKAGES_DYNAMIC'><glossterm>PACKAGES_DYNAMIC</glossterm>
+            <glossdef>
+                <para>
+                    A promise that your recipe satisfies runtime dependencies
+                    for optional modules that are found in other recipes.
+                    <filename>PACKAGES_DYNAMIC</filename>
+                    does not actually satisfy the dependencies; it only states that
+                    they should be satisfied.
+                    For example, if a hard runtime dependency
+                    (<link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link>)
+                    of another package is satisfied during the build
+                    through the <filename>PACKAGES_DYNAMIC</filename>
+                    variable, but a package with the module name is never actually
+                    produced, then the other package will be broken.
+                </para>
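+                <para>
+                    As an illustrative sketch, a hypothetical recipe that
+                    produces optional plugin packages could state the promise
+                    as follows (the pattern shown is an assumption for
+                    illustration, not a required form):
+                    <literallayout class='monospaced'>
+     # Promise to provide packages whose names start with "${PN}-plugin-"
+     PACKAGES_DYNAMIC = "^${PN}-plugin-.*"
+                    </literallayout>
+                </para>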
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-PE'><glossterm>PE</glossterm>
+            <glossdef>
+                <para>
+                    The epoch of the recipe.
+                    By default, this variable is unset.
+                    The variable is used to make upgrades possible when the
+                    versioning scheme changes in some backwards incompatible
+                    way.
+                </para>
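+                <para>
+                    For example, a recipe whose upstream versioning scheme
+                    changed in a backwards incompatible way could set a
+                    non-zero epoch (a minimal sketch):
+                    <literallayout class='monospaced'>
+     # Epoch bumped after an upstream version-scheme change
+     PE = "1"
+                    </literallayout>
+                </para>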
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-PERSISTENT_DIR'><glossterm>PERSISTENT_DIR</glossterm>
+            <glossdef>
+                <para>
+                    Specifies the directory BitBake uses to store data that
+                    should be preserved between builds.
+                    In particular, this includes data that uses BitBake's
+                    persistent data API and data used by the PR Server and
+                    PR Service.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-PF'><glossterm>PF</glossterm>
+            <glossdef>
+                <para>
+                    Specifies the recipe or package name and includes all version and revision
+                    numbers (e.g. <filename>eglibc-2.13-r20+svnr15508/</filename> and
+                    <filename>bash-4.2-r1/</filename>).
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-PN'><glossterm>PN</glossterm>
+            <glossdef>
+                <para>The recipe name.</para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-PR'><glossterm>PR</glossterm>
+            <glossdef>
+                <para>The revision of the recipe.
+                    </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-PREFERRED_PROVIDER'><glossterm>PREFERRED_PROVIDER</glossterm>
+            <glossdef>
+                <para>
+                    Determines which recipe should be given preference when
+                    multiple recipes provide the same item.
+                    You should always suffix the variable with the name of the
+                    provided item, and you should set it to the
+                    <link linkend='var-PN'><filename>PN</filename></link>
+                    of the recipe to which you want to give precedence.
+                    Some examples:
+                    <literallayout class='monospaced'>
+     PREFERRED_PROVIDER_virtual/kernel ?= "linux-yocto"
+     PREFERRED_PROVIDER_virtual/xserver = "xserver-xf86"
+     PREFERRED_PROVIDER_virtual/libgl ?= "mesa"
+                    </literallayout>
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-PREFERRED_PROVIDERS'><glossterm>PREFERRED_PROVIDERS</glossterm>
+            <glossdef>
+                <para>
+                    Determines which recipe should be given preference for
+                    cases where multiple recipes provide the same item.
+                    Functionally,
+                    <filename>PREFERRED_PROVIDERS</filename> is identical to
+                    <link linkend='var-PREFERRED_PROVIDER'><filename>PREFERRED_PROVIDER</filename></link>.
+                    However, the <filename>PREFERRED_PROVIDERS</filename>
+                    variable lets you define preferences for multiple
+                    situations using the following form:
+                    <literallayout class='monospaced'>
+     PREFERRED_PROVIDERS = "xxx:yyy aaa:bbb ..."
+                    </literallayout>
+                    This form is a convenient replacement for the following:
+                    <literallayout class='monospaced'>
+     PREFERRED_PROVIDER_xxx = "yyy"
+     PREFERRED_PROVIDER_aaa = "bbb"
+                    </literallayout>
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-PREFERRED_VERSION'><glossterm>PREFERRED_VERSION</glossterm>
+            <glossdef>
+                <para>
+                    If there are multiple versions of recipes available, this
+                    variable determines which recipe should be given preference.
+                    You must always suffix the variable with the
+                    <link linkend='var-PN'><filename>PN</filename></link>
+                    you want to select, and you should set
+                    <link linkend='var-PV'><filename>PV</filename></link>
+                    accordingly for precedence.
+                    You can use the "<filename>%</filename>" character as a
+                    wildcard to match any number of characters, which can be
+                    useful when specifying versions that contain long revision
+                    numbers that could potentially change.
+                    Here are two examples:
+                    <literallayout class='monospaced'>
+     PREFERRED_VERSION_python = "2.7.3"
+     PREFERRED_VERSION_linux-yocto = "3.10%"
+                    </literallayout>
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-PREMIRRORS'><glossterm>PREMIRRORS</glossterm>
+            <glossdef>
+                <para>
+                    Specifies additional paths from which BitBake gets source code.
+                    When the build system searches for source code, it first
+                    tries the local download directory.
+                    If that location fails, the build system tries locations
+                    defined by <filename>PREMIRRORS</filename>, the upstream
+                    source, and then locations specified by
+                    <link linkend='var-MIRRORS'><filename>MIRRORS</filename></link>
+                    in that order.
+                </para>
+
+                <para>
+                    Typically, you would add a specific server for the
+                    build system to attempt before any others by adding
+                    something like the following to your configuration:
+                    <literallayout class='monospaced'>
+     PREMIRRORS_prepend = "\
+     git://.*/.* http://www.yoctoproject.org/sources/ \n \
+     ftp://.*/.* http://www.yoctoproject.org/sources/ \n \
+     http://.*/.* http://www.yoctoproject.org/sources/ \n \
+     https://.*/.* http://www.yoctoproject.org/sources/ \n"
+                    </literallayout>
+                    These changes cause the build system to intercept
+                    Git, FTP, HTTP, and HTTPS requests and direct them to
+                    the <filename>http://</filename> sources mirror.
+                    You can use <filename>file://</filename> URLs to point
+                    to local directories or network shares as well.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-PROVIDES'><glossterm>PROVIDES</glossterm>
+            <glossdef>
+                <para>
+                    A list of aliases by which a particular recipe can be
+                    known.
+                    By default, a recipe's own
+                    <filename><link linkend='var-PN'>PN</link></filename>
+                    is implicitly already in its <filename>PROVIDES</filename>
+                    list.
+                    If a recipe uses <filename>PROVIDES</filename>, the
+                    additional aliases are synonyms for the recipe and can
+                    be useful for satisfying dependencies of other recipes during
+                    the build as specified by
+                    <filename><link linkend='var-DEPENDS'>DEPENDS</link></filename>.
+                </para>
+
+                <para>
+                    Consider the following example
+                    <filename>PROVIDES</filename> statement from a recipe
+                    file <filename>libav_0.8.11.bb</filename>:
+                    <literallayout class='monospaced'>
+     PROVIDES += "libpostproc"
+                    </literallayout>
+                    The <filename>PROVIDES</filename> statement results in
+                    the "libav" recipe also being known as "libpostproc".
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-PRSERV_HOST'><glossterm>PRSERV_HOST</glossterm>
+            <glossdef>
+                <para>
+                    The network based
+                    <link linkend='var-PR'><filename>PR</filename></link>
+                    service host and port.
+                </para>
+
+                <para>
+                    Following is an example of how the <filename>PRSERV_HOST</filename> variable is
+                    set:
+                    <literallayout class='monospaced'>
+     PRSERV_HOST = "localhost:0"
+                    </literallayout>
+                    You must set the variable if you want to automatically
+                    start a local PR service.
+                    You can set <filename>PRSERV_HOST</filename> to other
+                    values to use a remote PR service.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-PV'><glossterm>PV</glossterm>
+            <glossdef>
+                <para>The version of the recipe.
+                 </para>
+            </glossdef>
+        </glossentry>
+
+    </glossdiv>
+
+<!--
+    <glossdiv id='var-glossary-q'><title>Q</title>
+    </glossdiv>
+-->
+
+    <glossdiv id='var-glossary-r'><title>R</title>
+
+        <glossentry id='var-RDEPENDS'><glossterm>RDEPENDS</glossterm>
+            <glossdef>
+                <para>
+                    Lists a package's runtime dependencies (i.e. other packages)
+                    that must be installed in order for the built package to run
+                    correctly.
+                    If a package in this list cannot be found during the build,
+                    you will get a build error.
+                </para>
+
+                <para>
+                    Because the <filename>RDEPENDS</filename> variable applies
+                    to packages being built, you should always use the variable
+                    in a form with an attached package name.
+                    For example, suppose you are building a development package
+                    that depends on the <filename>perl</filename> package.
+                    In this case, you would use the following
+                    <filename>RDEPENDS</filename> statement:
+                    <literallayout class='monospaced'>
+     RDEPENDS_${PN}-dev += "perl"
+                    </literallayout>
+                    In the example, the development package depends on
+                    the <filename>perl</filename> package.
+                    Thus, the <filename>RDEPENDS</filename> variable has the
+                    <filename>${PN}-dev</filename> package name as part of the
+                    variable.
+                </para>
+
+                <para>
+                    BitBake supports specifying versioned dependencies.
+                    Although the syntax varies depending on the packaging
+                    format, BitBake hides these differences from you.
+                    Here is the general syntax to specify versions with
+                    the <filename>RDEPENDS</filename> variable:
+                    <literallayout class='monospaced'>
+     RDEPENDS_${PN} = "<replaceable>package</replaceable> (<replaceable>operator</replaceable> <replaceable>version</replaceable>)"
+                    </literallayout>
+                    For <filename>operator</filename>, you can specify the
+                    following:
+                    <literallayout class='monospaced'>
+     =
+     &lt;
+     &gt;
+     &lt;=
+     &gt;=
+                    </literallayout>
+                    For example, the following sets up a dependency on version
+                    1.2 or greater of the package <filename>foo</filename>:
+                    <literallayout class='monospaced'>
+     RDEPENDS_${PN} = "foo (>= 1.2)"
+                    </literallayout>
+                </para>
+
+                <para>
+                    For information on build-time dependencies, see the
+                    <link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
+                    variable.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-RPROVIDES'><glossterm>RPROVIDES</glossterm>
+            <glossdef>
+                <para>
+                    A list of package name aliases that a package also provides.
+                    These aliases are useful for satisfying runtime dependencies
+                    of other packages both during the build and on the target
+                    (as specified by
+                    <filename><link linkend='var-RDEPENDS'>RDEPENDS</link></filename>).
+                </para>
+                <para>
+                   As with all package-controlling variables, you must always
+                   use the variable in conjunction with a package name override.
+                   Here is an example:
+                   <literallayout class='monospaced'>
+     RPROVIDES_${PN} = "widget-abi-2"
+                   </literallayout>
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-RRECOMMENDS'><glossterm>RRECOMMENDS</glossterm>
+            <glossdef>
+                <para>
+                    A list of packages that extend the usability of a package
+                    being built.
+                    The package being built does not depend on the packages in
+                    this list in order to build successfully, but needs them
+                    for extended usability.
+                    To specify runtime dependencies for packages, see the
+                    <filename><link linkend='var-RDEPENDS'>RDEPENDS</link></filename>
+                    variable.
+                </para>
+
+                <para>
+                    BitBake supports specifying versioned recommends.
+                    Although the syntax varies depending on the packaging
+                    format, BitBake hides these differences from you.
+                    Here is the general syntax to specify versions with
+                    the <filename>RRECOMMENDS</filename> variable:
+                    <literallayout class='monospaced'>
+     RRECOMMENDS_${PN} = "<replaceable>package</replaceable> (<replaceable>operator</replaceable> <replaceable>version</replaceable>)"
+                    </literallayout>
+                    For <filename>operator</filename>, you can specify the
+                    following:
+                    <literallayout class='monospaced'>
+     =
+     &lt;
+     &gt;
+     &lt;=
+     &gt;=
+                    </literallayout>
+                    For example, the following sets up a recommend on version
+                    1.2 or greater of the package <filename>foo</filename>:
+                    <literallayout class='monospaced'>
+     RRECOMMENDS_${PN} = "foo (>= 1.2)"
+                    </literallayout>
+                </para>
+            </glossdef>
+        </glossentry>
+
+    </glossdiv>
+
+    <glossdiv id='var-glossary-s'><title>S</title>
+
+        <glossentry id='var-SECTION'><glossterm>SECTION</glossterm>
+            <glossdef>
+                <para>The section in which packages should be categorized.</para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-SRC_URI'><glossterm>SRC_URI</glossterm>
+            <glossdef>
+                <para>
+                    The list of source files - local or remote.
+                    This variable tells BitBake which bits
+                    to pull for the build and how to pull them.
+                    For example, if the recipe or append file needs to
+                    fetch a single tarball from the Internet, the recipe or
+                    append file uses a <filename>SRC_URI</filename>
+                    entry that specifies that tarball.
+                    On the other hand, if the recipe or append file needs to
+                    fetch a tarball and include a custom file, the recipe or
+                    append file needs an <filename>SRC_URI</filename> variable
+                    that specifies all those sources.</para>
+                <para>The following list explains the available URI protocols:
+                    <itemizedlist>
+                        <listitem><para><emphasis><filename>file://</filename> -</emphasis>
+                            Fetches files, which are usually files shipped with
+                            the metadata,
+                            from the local machine.
+                            The path is relative to the
+                            <link linkend='var-FILESPATH'><filename>FILESPATH</filename></link>
+                            variable.</para></listitem>
+                        <listitem><para><emphasis><filename>bzr://</filename> -</emphasis> Fetches files from a
+                            Bazaar revision control repository.</para></listitem>
+                        <listitem><para><emphasis><filename>git://</filename> -</emphasis> Fetches files from a
+                            Git revision control repository.</para></listitem>
+                        <listitem><para><emphasis><filename>osc://</filename> -</emphasis> Fetches files from
+                            an OSC (OpenSUSE Build service) revision control repository.</para></listitem>
+                        <listitem><para><emphasis><filename>repo://</filename> -</emphasis> Fetches files from
+                            a repo (Git) repository.</para></listitem>
+                        <listitem><para><emphasis><filename>http://</filename> -</emphasis> Fetches files from
+                            the Internet using HTTP.</para></listitem>
+                        <listitem><para><emphasis><filename>https://</filename> -</emphasis> Fetches files
+                            from the Internet using HTTPS.</para></listitem>
+                        <listitem><para><emphasis><filename>ftp://</filename> -</emphasis> Fetches files
+                            from the Internet using FTP.</para></listitem>
+                        <listitem><para><emphasis><filename>cvs://</filename> -</emphasis> Fetches files from
+                            a CVS revision control repository.</para></listitem>
+                        <listitem><para><emphasis><filename>hg://</filename> -</emphasis> Fetches files from
+                            a Mercurial (<filename>hg</filename>) revision control repository.</para></listitem>
+                        <listitem><para><emphasis><filename>p4://</filename> -</emphasis> Fetches files from
+                            a Perforce (<filename>p4</filename>) revision control repository.</para></listitem>
+                        <listitem><para><emphasis><filename>ssh://</filename> -</emphasis> Fetches files from
+                            a secure shell.</para></listitem>
+                        <listitem><para><emphasis><filename>svn://</filename> -</emphasis> Fetches files from
+                            a Subversion (<filename>svn</filename>) revision control repository.</para></listitem>
+                    </itemizedlist>
+                </para>
+                <para>Here are some additional options worth mentioning:
+                    <itemizedlist>
+                        <listitem><para><emphasis><filename>unpack</filename> -</emphasis> Controls
+                            whether or not to unpack the file if it is an archive.
+                            The default action is to unpack the file.</para></listitem>
+                        <listitem><para><emphasis><filename>subdir</filename> -</emphasis> Places the file
+                            (or extracts its contents) into the specified
+                            subdirectory.
+                            This option is useful for unusual tarballs or other archives that
+                            do not have their files already in a subdirectory within the archive.
+                            </para></listitem>
+                        <listitem><para><emphasis><filename>name</filename> -</emphasis> Specifies a
+                            name to be used for association with <filename>SRC_URI</filename> checksums
+                            when you have more than one file specified in <filename>SRC_URI</filename>.
+                            </para></listitem>
+                        <listitem><para><emphasis><filename>downloadfilename</filename> -</emphasis> Specifies
+                            the filename used when storing the downloaded file.</para></listitem>
+                    </itemizedlist>
+                </para>
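+                <para>
+                    As a brief sketch, a recipe could combine a remote tarball
+                    with a local patch file (the URL and filenames are
+                    illustrative placeholders):
+                    <literallayout class='monospaced'>
+     # "example.com" and the file names are hypothetical
+     SRC_URI = "http://example.com/releases/hello-1.0.tar.gz \
+                file://fix-build.patch"
+                    </literallayout>
+                    The <filename>file://</filename> entry is located through
+                    <link linkend='var-FILESPATH'><filename>FILESPATH</filename></link>
+                    as described above.
+                </para>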
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-SRCDATE'><glossterm>SRCDATE</glossterm>
+            <glossdef>
+                <para>
+                    The date of the source code used to build the package.
+                    This variable applies only if the source was fetched from a Source Code Manager (SCM).
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-SRCREV'><glossterm>SRCREV</glossterm>
+            <glossdef>
+                <para>
+                    The revision of the source code used to build the package.
+                    This variable applies only when using Subversion, Git, Mercurial and Bazaar.
+                    If you want to build a fixed revision and you want
+                    to avoid performing a query on the remote repository every time
+                    BitBake parses your recipe, you should specify a <filename>SRCREV</filename> that is a
+                    full revision identifier and not just a tag.
+                </para>
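+                <para>
+                    For example, to pin a Git-based recipe to a fixed commit,
+                    you could set the variable to a full revision identifier
+                    (the hash shown is a made-up placeholder):
+                    <literallayout class='monospaced'>
+     # Placeholder hash shown for illustration only
+     SRCREV = "2d47ee1b7d310cc78ab2a9ca4f0e9a345a69fd55"
+                    </literallayout>
+                </para>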
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-SRCREV_FORMAT'><glossterm>SRCREV_FORMAT</glossterm>
+            <glossdef>
+                <para>
+                    Helps construct valid
+                    <link linkend='var-SRCREV'><filename>SRCREV</filename></link>
+                    values when multiple source controlled URLs are used in
+                    <link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>.
+                </para>
+
+                <para>
+                    The system needs help constructing these values under these
+                    circumstances.
+                    Each component in the <filename>SRC_URI</filename>
+                    is assigned a name and these are referenced
+                    in the <filename>SRCREV_FORMAT</filename> variable.
+                    Consider an example with URLs named "machine" and "meta".
+                    In this case, <filename>SRCREV_FORMAT</filename> could look
+                    like "machine_meta" and those names would have the SCM
+                    versions substituted into each position.
+                    An <filename>AUTOINC</filename> placeholder is added
+                    only if needed, and it is placed at the start of the
+                    returned string.
+                </para>
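+                <para>
+                    Continuing the example above, a sketch of the corresponding
+                    metadata could look like the following (the repository URLs
+                    are illustrative placeholders):
+                    <literallayout class='monospaced'>
+     # Two named source-controlled URLs: "machine" and "meta"
+     SRC_URI = "git://example.com/machine.git;name=machine \
+                git://example.com/meta.git;name=meta"
+     SRCREV_FORMAT = "machine_meta"
+                    </literallayout>
+                </para>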
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-STAMP'><glossterm>STAMP</glossterm>
+            <glossdef>
+                <para>
+                    Specifies the base path used to create recipe stamp files.
+                    The path to an actual stamp file is constructed by evaluating this
+                    string and then appending additional information.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-STAMPCLEAN'><glossterm>STAMPCLEAN</glossterm>
+            <glossdef>
+                <para>
+                    Specifies the base path used to create recipe stamp files.
+                    Unlike the
+                    <link linkend='var-STAMP'><filename>STAMP</filename></link>
+                    variable, <filename>STAMPCLEAN</filename> can contain
+                    wildcards to match the range of files a clean operation
+                    should remove.
+                    BitBake uses a clean operation to remove any other stamps
+                    it should be removing when creating a new stamp.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-SUMMARY'><glossterm>SUMMARY</glossterm>
+            <glossdef>
+                <para>
+                    A short summary for the recipe, which should be 72 characters or less.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-SVNDIR'><glossterm>SVNDIR</glossterm>
+            <glossdef>
+                <para>
+                    The directory in which files checked out of a Subversion
+                    system are stored.
+                </para>
+            </glossdef>
+        </glossentry>
+
+    </glossdiv>
+
+    <glossdiv id='var-glossary-t'><title>T</title>
+
+        <glossentry id='var-T'><glossterm>T</glossterm>
+            <glossdef>
+                <para>Points to a directory where BitBake places
+                    temporary files, which consist mostly of task logs and
+                    scripts, when building a particular recipe.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-TOPDIR'><glossterm>TOPDIR</glossterm>
+            <glossdef>
+                <para>
+                    Points to the build directory.
+                    BitBake automatically sets this variable.
+                </para>
+            </glossdef>
+        </glossentry>
+
+    </glossdiv>
+
+<!--
+    <glossdiv id='var-glossary-u'><title>U</title>
+    </glossdiv>
+
+    <glossdiv id='var-glossary-v'><title>V</title>
+   </glossdiv>
+
+    <glossdiv id='var-glossary-w'><title>W</title>
+    </glossdiv>
+
+    <glossdiv id='var-glossary-x'><title>X</title>
+    </glossdiv>
+
+    <glossdiv id='var-glossary-y'><title>Y</title>
+    </glossdiv>
+
+    <glossdiv id='var-glossary-z'><title>Z</title>
+    </glossdiv>
+-->
+
+
+</glossary>
+</chapter>
+<!--
+vim: expandtab tw=80 ts=4
+-->

+ 984 - 0
bitbake/doc/bitbake-user-manual/bitbake-user-manual-style.css

@@ -0,0 +1,984 @@
+/*
+   Generic XHTML / DocBook XHTML CSS Stylesheet.
+
+   Browser wrangling and typographic design by
+      Oyvind Kolas / pippin@gimp.org
+
+   Customised for Poky by
+      Matthew Allum / mallum@o-hand.com
+
+   Thanks to:
+     Liam R. E. Quin
+     William Skaggs
+     Jakub Steiner
+
+   Structure
+   ---------
+
+   The stylesheet is divided into the following sections:
+
+       Positioning
+          Margins, paddings, width, font-size, clearing.
+       Decorations
+          Borders, style
+       Colors
+          Colors
+       Graphics
+          Graphical backgrounds
+       Nasty IE tweaks
+          Workarounds needed to make it work in Internet Explorer;
+          these currently make the stylesheet non-validating, but up
+          until this point it is validating.
+       Mozilla extensions
+          Transparency for footer
+	  Rounded corners on boxes
+
+*/
+
+
+  /*************** /
+ /  Positioning   /
+/ ***************/
+
+body {
+  font-family: Verdana, Sans, sans-serif;
+
+  min-width: 640px;
+  width: 80%;
+  margin:  0em auto;
+  padding: 2em 5em 5em 5em;
+  color: #333;
+}
+
+h1,h2,h3,h4,h5,h6,h7 {
+  font-family: Arial, Sans;
+  color: #00557D;
+  clear: both;
+}
+
+h1 {
+  font-size: 2em;
+  text-align: left;
+  padding: 0em 0em 0em 0em;
+  margin: 2em 0em 0em 0em;
+}
+
+h2.subtitle {
+  margin: 0.10em 0em 3.0em 0em;
+  padding: 0em 0em 0em 0em;
+  font-size: 1.8em;
+  padding-left: 20%;
+  font-weight: normal;
+  font-style: italic;
+}
+
+h2 {
+  margin: 2em 0em 0.66em 0em;
+  padding: 0.5em 0em 0em 0em;
+  font-size: 1.5em;
+  font-weight: bold;
+}
+
+h3.subtitle {
+  margin: 0em 0em 1em 0em;
+  padding: 0em 0em 0em 0em;
+  font-size: 142.14%;
+  text-align: right;
+}
+
+h3 {
+  margin: 1em 0em 0.5em 0em;
+  padding: 1em 0em 0em 0em;
+  font-size: 140%;
+  font-weight: bold;
+}
+
+h4 {
+  margin: 1em 0em 0.5em 0em;
+  padding: 1em 0em 0em 0em;
+  font-size: 120%;
+  font-weight: bold;
+}
+
+h5 {
+  margin: 1em 0em 0.5em 0em;
+  padding: 1em 0em 0em 0em;
+  font-size: 110%;
+  font-weight: bold;
+}
+
+h6 {
+  margin: 1em 0em 0em 0em;
+  padding: 1em 0em 0em 0em;
+  font-size: 110%;
+  font-weight: bold;
+}
+
+.authorgroup {
+  background-color: transparent;
+  background-repeat: no-repeat;
+  padding-top: 256px;
+  background-image: url("figures/bitbake-title.png");
+  background-position: left top;
+  margin-top: -256px;
+  padding-right: 50px;
+  margin-left: 0px;
+  text-align: right;
+  width: 740px;
+}
+
+h3.author {
+  margin: 0em 0em 0em 0em;
+  padding: 0em 0em 0em 0em;
+  font-weight: normal;
+  font-size: 100%;
+  color: #333;
+  clear: both;
+}
+
+.author tt.email {
+  font-size: 66%;
+}
+
+.titlepage hr {
+  width: 0em;
+  clear: both;
+}
+
+.revhistory {
+  padding-top: 2em;
+  clear: both;
+}
+
+.toc,
+.list-of-tables,
+.list-of-examples,
+.list-of-figures {
+  padding: 1.33em 0em 2.5em 0em;
+  color: #00557D;
+}
+
+.toc p,
+.list-of-tables p,
+.list-of-figures p,
+.list-of-examples p {
+  padding: 0em 0em 0em 0em;
+  padding: 0em 0em 0.3em;
+  margin: 1.5em 0em 0em 0em;
+}
+
+.toc p b,
+.list-of-tables p b,
+.list-of-figures p b,
+.list-of-examples p b{
+  font-size: 100.0%;
+  font-weight: bold;
+}
+
+.toc dl,
+.list-of-tables dl,
+.list-of-figures dl,
+.list-of-examples dl {
+  margin: 0em 0em 0.5em 0em;
+  padding: 0em 0em 0em 0em;
+}
+
+.toc dt {
+  margin: 0em 0em 0em 0em;
+  padding: 0em 0em 0em 0em;
+}
+
+.toc dd {
+  margin: 0em 0em 0em 2.6em;
+  padding: 0em 0em 0em 0em;
+}
+
+div.glossary dl,
+div.variablelist dl {
+}
+
+.glossary dl dt,
+.variablelist dl dt,
+.variablelist dl dt span.term {
+  font-weight: normal;
+  width: 20em;
+  text-align: right;
+}
+
+.variablelist dl dt {
+  margin-top: 0.5em;
+}
+
+.glossary dl dd,
+.variablelist dl dd {
+  margin-top: -1em;
+  margin-left: 25.5em;
+}
+
+.glossary dd p,
+.variablelist dd p {
+  margin-top: 0em;
+  margin-bottom: 1em;
+}
+
+
+div.calloutlist table td {
+  padding: 0em 0em 0em 0em;
+  margin: 0em 0em 0em 0em;
+}
+
+div.calloutlist table td p {
+  margin-top: 0em;
+  margin-bottom: 1em;
+}
+
+div p.copyright {
+  text-align: left;
+}
+
+div.legalnotice p.legalnotice-title {
+  margin-bottom: 0em;
+}
+
+p {
+  line-height: 1.5em;
+  margin-top: 0em;
+
+}
+
+dl {
+  padding-top: 0em;
+}
+
+hr {
+  border: solid 1px;
+}
+
+
+.mediaobject,
+.mediaobjectco {
+  text-align: center;
+}
+
+img {
+  border: none;
+}
+
+ul {
+  padding: 0em 0em 0em 1.5em;
+}
+
+ul li {
+  padding: 0em 0em 0em 0em;
+}
+
+ul li p {
+  text-align: left;
+}
+
+table {
+  width: 100%;
+}
+
+th {
+  padding: 0.25em;
+  text-align: left;
+  font-weight: normal;
+  vertical-align: top;
+}
+
+td {
+  padding: 0.25em;
+  vertical-align: top;
+}
+
+p a[id] {
+  margin: 0px;
+  padding: 0px;
+  display: inline;
+  background-image: none;
+}
+
+a {
+  text-decoration: underline;
+  color: #444;
+}
+
+pre {
+    overflow: auto;
+}
+
+a:hover {
+  text-decoration: underline;
+  /*font-weight: bold;*/
+}
+
+/* This style defines how the permalink character
+   appears by itself and when hovered over with
+   the mouse. */
+
+[alt='Permalink'] { color: #eee; }
+[alt='Permalink']:hover { color: black; }
+
+
+div.informalfigure,
+div.informalexample,
+div.informaltable,
+div.figure,
+div.table,
+div.example {
+  margin: 1em 0em;
+  padding: 1em;
+  page-break-inside: avoid;
+}
+
+
+div.informalfigure p.title b,
+div.informalexample p.title b,
+div.informaltable p.title b,
+div.figure p.title b,
+div.example p.title b,
+div.table p.title b{
+    padding-top: 0em;
+    margin-top: 0em;
+    font-size: 100%;
+    font-weight: normal;
+}
+
+.mediaobject .caption,
+.mediaobject .caption p  {
+  text-align: center;
+  font-size: 80%;
+  padding-top: 0.5em;
+  padding-bottom: 0.5em;
+}
+
+.epigraph {
+  padding-left: 55%;
+  margin-bottom: 1em;
+}
+
+.epigraph p {
+  text-align: left;
+}
+
+.epigraph .quote {
+  font-style: italic;
+}
+.epigraph .attribution {
+  font-style: normal;
+  text-align: right;
+}
+
+span.application {
+  font-style: italic;
+}
+
+.programlisting {
+  font-family: monospace;
+  font-size: 80%;
+  white-space: pre;
+  margin: 1.33em 0em;
+  padding: 1.33em;
+}
+
+.tip,
+.warning,
+.caution,
+.note {
+  margin-top: 1em;
+  margin-bottom: 1em;
+
+}
+
+/* force full width of table within div */
+.tip table,
+.warning table,
+.caution table,
+.note table {
+  border: none;
+  width: 100%;
+}
+
+
+.tip table th,
+.warning table th,
+.caution table th,
+.note table th {
+  padding: 0.8em 0.0em 0.0em 0.0em;
+  margin : 0em 0em 0em 0em;
+}
+
+.tip p,
+.warning p,
+.caution p,
+.note p {
+  margin-top: 0.5em;
+  margin-bottom: 0.5em;
+  padding-right: 1em;
+  text-align: left;
+}
+
+.acronym {
+  text-transform: uppercase;
+}
+
+b.keycap,
+.keycap {
+  padding: 0.09em 0.3em;
+  margin: 0em;
+}
+
+.itemizedlist li {
+  clear: none;
+}
+
+.filename {
+  font-size: medium;
+  font-family: Courier, monospace;
+}
+
+
+div.navheader, div.heading{
+  position: absolute;
+  left: 0em;
+  top: 0em;
+  width: 100%;
+  background-color: #cdf;
+  width: 100%;
+}
+
+div.navfooter, div.footing{
+  position: fixed;
+  left: 0em;
+  bottom: 0em;
+  background-color: #eee;
+  width: 100%;
+}
+
+
+div.navheader td,
+div.navfooter td {
+  font-size: 66%;
+}
+
+div.navheader table th {
+  /*font-family: Georgia, Times, serif;*/
+  /*font-size: x-large;*/
+  font-size: 80%;
+}
+
+div.navheader table {
+  border-left: 0em;
+  border-right: 0em;
+  border-top: 0em;
+  width: 100%;
+}
+
+div.navfooter table {
+  border-left: 0em;
+  border-right: 0em;
+  border-bottom: 0em;
+  width: 100%;
+}
+
+div.navheader table td a,
+div.navfooter table td a {
+  color: #777;
+  text-decoration: none;
+}
+
+/* normal text in the footer */
+div.navfooter table td {
+  color: black;
+}
+
+div.navheader table td a:visited,
+div.navfooter table td a:visited {
+  color: #444;
+}
+
+
+/* links in header and footer */
+div.navheader table td a:hover,
+div.navfooter table td a:hover {
+  text-decoration: underline;
+  background-color: transparent;
+  color: #33a;
+}
+
+div.navheader hr,
+div.navfooter hr {
+  display: none;
+}
+
+
+.qandaset tr.question td p {
+  margin: 0em 0em 1em 0em;
+  padding: 0em 0em 0em 0em;
+}
+
+.qandaset tr.answer td p {
+  margin: 0em 0em 1em 0em;
+  padding: 0em 0em 0em 0em;
+}
+.answer td {
+  padding-bottom: 1.5em;
+}
+
+.emphasis {
+  font-weight: bold;
+}
+
+
+  /************* /
+ / decorations  /
+/ *************/
+
+.titlepage {
+}
+
+.part .title {
+}
+
+.subtitle {
+    border: none;
+}
+
+/*
+h1 {
+  border: none;
+}
+
+h2 {
+  border-top: solid 0.2em;
+  border-bottom: solid 0.06em;
+}
+
+h3 {
+  border-top: 0em;
+  border-bottom: solid 0.06em;
+}
+
+h4 {
+  border: 0em;
+  border-bottom: solid 0.06em;
+}
+
+h5 {
+  border: 0em;
+}
+*/
+
+.programlisting {
+  border: solid 1px;
+}
+
+div.figure,
+div.table,
+div.informalfigure,
+div.informaltable,
+div.informalexample,
+div.example {
+  border: 1px solid;
+}
+
+
+
+.tip,
+.warning,
+.caution,
+.note {
+  border: 1px solid;
+}
+
+.tip table th,
+.warning table th,
+.caution table th,
+.note table th {
+  border-bottom: 1px solid;
+}
+
+.question td {
+  border-top: 1px solid black;
+}
+
+.answer {
+}
+
+
+b.keycap,
+.keycap {
+  border: 1px solid;
+}
+
+
+div.navheader, div.heading{
+  border-bottom: 1px solid;
+}
+
+
+div.navfooter, div.footing{
+  border-top: 1px solid;
+}
+
+  /********* /
+ /  colors  /
+/ *********/
+
+body {
+  color: #333;
+  background: white;
+}
+
+a {
+  background: transparent;
+}
+
+a:hover {
+  background-color: #dedede;
+}
+
+
+h1,
+h2,
+h3,
+h4,
+h5,
+h6,
+h7,
+h8 {
+  background-color: transparent;
+}
+
+hr {
+  border-color: #aaa;
+}
+
+
+.tip, .warning, .caution, .note {
+  border-color: #fff;
+}
+
+
+.tip table th,
+.warning table th,
+.caution table th,
+.note table th {
+  border-bottom-color: #fff;
+}
+
+
+.warning {
+  background-color: #f0f0f2;
+}
+
+.caution {
+  background-color: #f0f0f2;
+}
+
+.tip {
+  background-color: #f0f0f2;
+}
+
+.note {
+  background-color: #f0f0f2;
+}
+
+.glossary dl dt,
+.variablelist dl dt,
+.variablelist dl dt span.term {
+  color: #044;
+}
+
+div.figure,
+div.table,
+div.example,
+div.informalfigure,
+div.informaltable,
+div.informalexample {
+  border-color: #aaa;
+}
+
+pre.programlisting {
+  color: black;
+  background-color: #fff;
+  border-color: #aaa;
+  border-width: 2px;
+}
+
+.guimenu,
+.guilabel,
+.guimenuitem {
+  background-color: #eee;
+}
+
+
+b.keycap,
+.keycap {
+  background-color: #eee;
+  border-color: #999;
+}
+
+
+div.navheader {
+  border-color: black;
+}
+
+
+div.navfooter {
+  border-color: black;
+}
+
+
+  /*********** /
+ /  graphics  /
+/ ***********/
+
+/*
+body {
+  background-image: url("images/body_bg.jpg");
+  background-attachment: fixed;
+}
+
+.navheader,
+.note,
+.tip {
+  background-image: url("images/note_bg.jpg");
+  background-attachment: fixed;
+}
+
+.warning,
+.caution {
+  background-image: url("images/warning_bg.jpg");
+  background-attachment: fixed;
+}
+
+.figure,
+.informalfigure,
+.example,
+.informalexample,
+.table,
+.informaltable {
+  background-image: url("images/figure_bg.jpg");
+  background-attachment: fixed;
+}
+
+*/
+h1,
+h2,
+h3,
+h4,
+h5,
+h6,
+h7{
+}
+
+/*
+Example of how to stick an image as part of the title.
+
+div.article .titlepage .title
+{
+  background-image: url("figures/white-on-black.png");
+  background-position: center;
+  background-repeat: repeat-x;
+}
+*/
+
+div.preface .titlepage .title,
+div.colophon .title,
+div.chapter .titlepage .title,
+div.article .titlepage .title
+{
+}
+
+div.section div.section .titlepage .title,
+div.sect2 .titlepage .title {
+    background: none;
+}
+
+
+h1.title {
+  background-color: transparent;
+  background-repeat: no-repeat;
+  height: 256px;
+  text-indent: -9000px;
+  overflow:hidden;
+}
+
+h2.subtitle {
+  background-color: transparent;
+  text-indent: -9000px;
+  overflow:hidden;
+  width: 0px;
+  display: none;
+}
+
+  /*************************************** /
+ /  pippin.gimp.org specific alterations  /
+/ ***************************************/
+
+/*
+div.heading, div.navheader {
+  color: #777;
+  font-size: 80%;
+  padding: 0;
+  margin: 0;
+  text-align: left;
+  position: absolute;
+  top: 0px;
+  left: 0px;
+  width: 100%;
+  height: 50px;
+  background: url('/gfx/heading_bg.png') transparent;
+  background-repeat: repeat-x;
+  background-attachment: fixed;
+  border: none;
+}
+
+div.heading a {
+  color: #444;
+}
+
+div.footing, div.navfooter {
+  border: none;
+  color: #ddd;
+  font-size: 80%;
+  text-align:right;
+
+  width: 100%;
+  padding-top: 10px;
+  position: absolute;
+  bottom: 0px;
+  left: 0px;
+
+  background: url('/gfx/footing_bg.png') transparent;
+}
+*/
+
+
+
+  /****************** /
+ /  nasty ie tweaks  /
+/ ******************/
+
+/*
+div.heading, div.navheader {
+  width:expression(document.body.clientWidth + "px");
+}
+
+div.footing, div.navfooter {
+  width:expression(document.body.clientWidth + "px");
+  margin-left:expression("-5em");
+}
+body {
+  padding:expression("4em 5em 0em 5em");
+}
+*/
+
+  /**************************************** /
+ / mozilla vendor specific css extensions  /
+/ ****************************************/
+/*
+div.navfooter, div.footing{
+  -moz-opacity: 0.8em;
+}
+
+div.figure,
+div.table,
+div.informalfigure,
+div.informaltable,
+div.informalexample,
+div.example,
+.tip,
+.warning,
+.caution,
+.note {
+  -moz-border-radius: 0.5em;
+}
+
+b.keycap,
+.keycap {
+  -moz-border-radius: 0.3em;
+}
+*/
+
+table tr td table tr td {
+  display: none;
+}
+
+
+hr {
+  display: none;
+}
+
+table {
+  border: 0em;
+}
+
+ .photo {
+  float: right;
+  margin-left:   1.5em;
+  margin-bottom: 1.5em;
+  margin-top: 0em;
+  max-width:      17em;
+  border:     1px solid gray;
+  padding:    3px;
+  background: white;
+}
+ .seperator {
+   padding-top: 2em;
+   clear: both;
+  }
+
+  #validators {
+      margin-top: 5em;
+      text-align: right;
+      color: #777;
+  }
+  @media print {
+      body {
+          font-size: 8pt;
+      }
+      .noprint {
+          display: none;
+      }
+  }
+
+
+.tip,
+.note {
+   background: #f0f0f2;
+   color: #333;
+   padding: 20px;
+   margin: 20px;
+}
+
+.tip h3,
+.note h3 {
+   padding: 0em;
+   margin: 0em;
+   font-size: 2em;
+   font-weight: bold;
+   color: #333;
+}
+
+.tip a,
+.note a {
+   color: #333;
+   text-decoration: underline;
+}
+
+.footnote {
+   font-size: small;
+   color: #333;
+}
+
+/* Changes the announcement text */
+.tip h3,
+.warning h3,
+.caution h3,
+.note h3 {
+   font-size:large;
+   color: #00557D;
+}

+ 88 - 0
bitbake/doc/bitbake-user-manual/bitbake-user-manual.xml

@@ -0,0 +1,88 @@
+<!DOCTYPE book PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
+"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
+
+<book id='bitbake-user-manual' lang='en'
+        xmlns:xi="http://www.w3.org/2003/XInclude"
+        xmlns="http://docbook.org/ns/docbook"
+        >
+    <bookinfo>
+
+        <mediaobject>
+            <imageobject>
+                <imagedata fileref='figures/bitbake-title.png'
+                    format='SVG'
+                    align='left' scalefit='1' width='100%'/>
+            </imageobject>
+        </mediaobject>
+
+        <title>
+            BitBake User Manual
+        </title>
+
+        <authorgroup>
+            <author>
+                <firstname>Richard Purdie, Chris Larson, and </firstname> <surname>Phil Blundell</surname>
+                <affiliation>
+                    <orgname>BitBake Community</orgname>
+                </affiliation>
+                <email>bitbake-devel@lists.openembedded.org</email>
+            </author>
+        </authorgroup>
+
+<!--
+# Add in some revision history if we want it here.
+        <revhistory>
+            <revision>
+                <revnumber>x.x</revnumber>
+                <date>dd month year</date>
+                <revremark>Some relevant comment</revremark>
+            </revision>
+            <revision>
+                <revnumber>x.x</revnumber>
+                <date>dd month year</date>
+                <revremark>Some relevant comment</revremark>
+            </revision>
+            <revision>
+                <revnumber>x.x</revnumber>
+                <date>dd month year</date>
+                <revremark>Some relevant comment</revremark>
+            </revision>
+            <revision>
+                <revnumber>x.x</revnumber>
+                <date>dd month year</date>
+                <revremark>Some relevant comment</revremark>
+            </revision>
+       </revhistory>
+-->
+
+        <copyright>
+            <year>2004-2015</year>
+            <holder>Richard Purdie</holder>
+            <holder>Chris Larson</holder>
+            <holder>and Phil Blundell</holder>
+        </copyright>
+
+        <legalnotice>
+            <para>
+                This work is licensed under the Creative Commons Attribution License.
+                To view a copy of this license, visit
+                <ulink url="http://creativecommons.org/licenses/by/2.5/">http://creativecommons.org/licenses/by/2.5/</ulink>
+                or send a letter to Creative Commons, 444 Castro Street,
+                Suite 900, Mountain View, California 94041, USA.
+            </para>
+        </legalnotice>
+    </bookinfo>
+
+    <xi:include href="bitbake-user-manual-intro.xml"/>
+
+    <xi:include href="bitbake-user-manual-execution.xml"/>
+
+    <xi:include href="bitbake-user-manual-metadata.xml"/>
+
+    <xi:include href="bitbake-user-manual-fetching.xml"/>
+
+    <xi:include href="bitbake-user-manual-ref-variables.xml"/>
+
+    <xi:include href="bitbake-user-manual-hello.xml"/>
+
+</book>

BIN
bitbake/doc/bitbake-user-manual/figures/bitbake-title.png


+ 281 - 0
bitbake/doc/bitbake-user-manual/html.css

@@ -0,0 +1,281 @@
+/* Feuille de style DocBook du projet Traduc.org                */
+/* DocBook CSS stylesheet of the Traduc.org project             */
+
+/* (c) Jean-Philippe Guérard - 14 août 2004                     */
+/* (c) Jean-Philippe Guérard - 14 August 2004                   */
+
+/* Cette feuille de style est libre, vous pouvez la             */
+/* redistribuer et la modifier selon les termes de la Licence   */
+/* Art Libre. Vous trouverez un exemplaire de cette Licence sur */
+/* http://tigreraye.org/Petit-guide-du-traducteur.html#licence-art-libre */
+
+/* This work of art is free, you can redistribute it and/or     */
+/* modify it according to terms of the Free Art license. You    */
+/* will find a specimen of this license on the Copyleft         */
+/* Attitude web site: http://artlibre.org as well as on other   */
+/* sites.                                                       */
+/* Please note that the French version of this licence as shown */
+/* on http://tigreraye.org/Petit-guide-du-traducteur.html#licence-art-libre */
+/* is the only official licence of this document. The English   */
+/* version is provided only to help you understand this licence. */
+
+/* La dernière version de cette feuille de style est toujours   */
+/* disponible sur : http://tigreraye.org/style.css              */
+/* Elle est également disponible sur :                          */
+/* http://www.traduc.org/docs/HOWTO/lecture/style.css           */
+
+/* The latest version of this stylesheet is available from:     */
+/* http://tigreraye.org/style.css                               */
+/* It is also available on:                                     */
+/* http://www.traduc.org/docs/HOWTO/lecture/style.css           */
+
+/* N'hésitez pas à envoyer vos commentaires et corrections à    */
+/* Jean-Philippe Guérard <jean-philippe.guerard@tigreraye.org>  */
+
+/* Please send feedback and bug reports to                      */
+/* Jean-Philippe Guérard <jean-philippe.guerard@tigreraye.org>  */
+
+/* $Id: style.css,v 1.14 2004/09/10 20:12:09 fevrier Exp fevrier $ */
+
+/* Présentation générale du document */
+/* Overall document presentation */
+
+body {
+    /*
+    font-family: Apolline, "URW Palladio L", Garamond, jGaramond,
+                 "Bitstream Cyberbit", "Palatino Linotype", serif;
+     */
+    margin: 7%;
+    background-color: white;
+}
+
+/* Taille du texte */
+/* Text size */
+
+* { font-size: 100%; }
+
+/* Gestion des textes mis en relief imbriqués */
+/* Embedded emphasis */
+
+em { font-style: italic; }
+em em { font-style: normal; }
+em em em { font-style: italic; }
+
+/* Titres */
+/* Titles */
+
+h1 { font-size: 200%; font-weight: 900; }
+h2 { font-size: 160%; font-weight: 900; }
+h3 { font-size: 130%; font-weight: bold; }
+h4 { font-size: 115%; font-weight: bold; }
+h5 { font-size: 108%; font-weight: bold; }
+h6 {                  font-weight: bold; }
+
+/* Nom de famille en petites majuscules (uniquement en français) */
+/* Last names in small caps (for French only) */
+
+*[class~="surname"]:lang(fr) { font-variant: small-caps; }
+
+/* Blocs de citation */
+/* Quotation blocs */
+
+div[class~="blockquote"] {
+  border: solid 2px #AAA;
+  padding: 5px;
+  margin: 5px;
+}
+
+div[class~="blockquote"] > table {
+  border: none;
+}
+
+/* Blocs litéraux : fond gris clair */
+/* Literal blocs: light gray background */
+
+*[class~="literallayout"] {
+  background: #f0f0f0;
+  padding: 5px;
+  margin: 5px;
+}
+
+/* Programmes et captures texte : fond bleu clair */
+/* Listing and text screen snapshots: light blue background */
+
+*[class~="programlisting"], *[class~="screen"] {
+  background: #f0f0ff;
+  padding: 5px;
+  margin: 5px;
+}
+
+/* Les textes à remplacer sont surlignés en vert pâle */
+/* Replaceable text is highlighted in pale green */
+
+*[class~="replaceable"] { 
+    background-color: #98fb98;
+    font-style: normal; }
+
+/* Tables : fonds gris clair & bords simples */
+/* Tables: light gray background and solid borders */
+
+*[class~="table"] *[class~="title"] { width:100%; border: 0px; }
+
+table {
+    border: 1px solid #aaa;
+    border-collapse: collapse;
+    padding: 2px;
+    margin: 5px;
+}
+
+/* Listes simples en style table */
+/* Simple lists in table presentation */
+
+table[class~="simplelist"] {
+    background-color: #F0F0F0;
+    margin: 5px;
+    border: solid 1px #AAA;
+}
+
+table[class~="simplelist"] td {
+    border: solid 1px #AAA;
+}
+
+/* Les tables */
+/* Tables */
+
+*[class~="table"] table {
+    background-color: #F0F0F0;
+    border: solid 1px #AAA;
+}
+*[class~="informaltable"] table { background-color: #F0F0F0; }
+
+th,td {
+    vertical-align: baseline;
+    text-align: left;
+    padding: 0.1em 0.3em;
+    empty-cells: show; 
+}
+
+/* Alignement des colonnes */
+/* Column alignment */
+
+td[align=center] ,  th[align=center]  { text-align: center; }
+td[align=right] ,   th[align=right]   { text-align: right; }
+td[align=left] ,    th[align=left]    { text-align: left; }
+td[align=justify] , th[align=justify] { text-align: justify; }
+
+/* Pas de marge autour des images */
+/* No inside margins for images */
+
+img { border: 0; }
+
+/* Les liens ne sont pas soulignés */
+/* No underlines for links */
+
+:link , :visited , :active { text-decoration: none; }
+
+/* Prudence : cadre jaune et fond jaune clair */
+/* Caution: yellow border and light yellow background */
+
+*[class~="caution"] {
+    border: solid 2px yellow;
+    background-color: #ffffe0;
+    padding: 1em 6px 1em ;
+    margin: 5px;
+}
+
+*[class~="caution"] th {
+    vertical-align: middle
+}
+
+*[class~="caution"] table {
+    background-color: #ffffe0;
+    border: none;
+}
+
+/* Note importante : cadre jaune et fond jaune clair */
+/* Important: yellow border and light yellow background */
+
+*[class~="important"] {
+    border: solid 2px yellow;
+    background-color: #ffffe0;
+    padding: 1em 6px 1em;
+    margin: 5px;
+}
+
+*[class~="important"] th {
+    vertical-align: middle
+}
+
+*[class~="important"] table  {
+    background-color: #ffffe0;
+    border: none;
+}
+
+/* Mise en évidence : texte légèrement plus grand */
+/* Highlights: slightly larger texts */
+
+*[class~="highlights"] {
+    font-size:  110%;
+}
+
+/* Note : cadre bleu et fond bleu clair */
+/* Notes: blue border and light blue background */
+
+*[class~="note"]   {
+    border: solid 2px #7099C5;
+    background-color: #f0f0ff;
+    padding: 1em 6px 1em ;
+    margin: 5px;
+}
+
+*[class~="note"] th {
+    vertical-align: middle
+}
+
+*[class~="note"] table {
+    background-color: #f0f0ff;
+    border: none;
+}
+
+/* Astuce : cadre vert et fond vert clair */
+/* Tip: green border and light green background */
+
+*[class~="tip"] {
+    border: solid 2px #00ff00;
+    background-color: #f0ffff;
+    padding: 1em 6px 1em ;
+    margin: 5px;
+}
+
+*[class~="tip"] th {
+    vertical-align: middle;
+}
+
+*[class~="tip"] table {
+    background-color: #f0ffff;
+    border: none;
+}
+
+/* Avertissement : cadre rouge et fond rouge clair */
+/* Warning: red border and light red background */
+
+*[class~="warning"] {
+    border: solid 2px #ff0000;
+    background-color: #fff0f0; 
+    padding: 1em 6px 1em ;
+    margin: 5px;
+}
+
+*[class~="warning"] th {
+    vertical-align: middle;
+}
+                    
+
+*[class~="warning"] table {
+    background-color: #fff0f0;
+    border: none;
+}
+
+/* Fin */
+/* The End */
+

+ 142 - 0
bitbake/doc/bitbake.1

@@ -0,0 +1,142 @@
+.\"                                      Hey, EMACS: -*- nroff -*-
+.\" First parameter, NAME, should be all caps
+.\" Second parameter, SECTION, should be 1-8, maybe w/ subsection
+.\" other parameters are allowed: see man(7), man(1)
+.TH BITBAKE 1 "November 19, 2006"
+.\" Please adjust this date whenever revising the manpage.
+.\"
+.\" Some roff macros, for reference:
+.\" .nh        disable hyphenation
+.\" .hy        enable hyphenation
+.\" .ad l      left justify
+.\" .ad b      justify to both left and right margins
+.\" .nf        disable filling
+.\" .fi        enable filling
+.\" .br        insert line break
+.\" .sp <n>    insert n+1 empty lines
+.\" for manpage-specific macros, see man(7)
+.SH NAME
+BitBake \- simple tool for the execution of tasks
+.SH SYNOPSIS
+.B bitbake
+.RI [ options ] " packagenames"
+.br
+.SH DESCRIPTION
+This manual page documents briefly the
+.B bitbake
+command.
+.PP
+.\" TeX users may be more comfortable with the \fB<whatever>\fP and
+.\" \fI<whatever>\fP escape sequences to invode bold face and italics, 
+.\" respectively.
+\fBbitbake\fP is a program that executes the specified task (default is 'build')
+for a given set of BitBake files.
+.br
+It expects that BBFILES is defined, which is a space-separated list of files to
+be executed. BBFILES does support wildcards.
+.br
+By default, BBFILES is the set of .bb files in the current directory.
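+.\" Illustrative examples (an addition, not from the original page), using a
+.\" hypothetical package name:
+.\"   bitbake somepackage            build the 'somepackage' target from BBFILES
+.\"   bitbake -b somepackage_1.0.bb  build a single recipe file directly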
+.SH OPTIONS
+This program follows the usual GNU command line syntax, with long
+options starting with two dashes (`--').
+.TP
+.B \-h, \-\-help
+Show summary of options.
+.TP
+.B \-\-version
+Show version of program.
+.TP
+.B \-bBUILDFILE, \-\-buildfile=BUILDFILE
+execute the task against this .bb file, rather than a package from BBFILES.
+.TP
+.B \-k, \-\-continue
+continue as much as possible after an error. While the target that failed, and
+those that depend on it, cannot be remade, the other dependencies of these
+targets can be processed all the same.
+.TP
+.B \-a, \-\-tryaltconfigs
+continue with builds by trying to use alternative providers where possible.
+.TP
+.B \-f, \-\-force
+force run of specified cmd, regardless of stamp status
+.TP
+.B \-i, \-\-interactive
+drop into the interactive mode, also called the BitBake shell.
+.TP
+.B \-cCMD, \-\-cmd=CMD
+Specify task to execute. Note that this only executes the specified task for
+the providee and the packages it depends on, i.e. 'compile' does not implicitly
+call stage for the dependencies (IOW: use only if you know what you are doing).
+Depending on the base.bbclass, a listtasks task may be defined that shows the
+available tasks.
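+.\" Illustrative (an addition): list the tasks defined for a hypothetical package:
+.\"   bitbake -c listtasks somepackage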
+.TP
+.B \-rFILE, \-\-read=FILE 
+read the specified file before bitbake.conf
+.TP
+.B \-v, \-\-verbose
+output more chit-chat to the terminal
+.TP
+.B \-D, \-\-debug
+Increase the debug level. You can specify this more than once.
+.TP
+.B \-n, \-\-dry-run
+don't execute, just go through the motions
+.TP
+.B \-p, \-\-parse-only
+quit after parsing the BB files (developers only)
+.TP
+.B \-s, \-\-show-versions
+show current and preferred versions of all packages
+.TP
+.B \-e, \-\-environment
+show the global or per-recipe environment (this is what used to be bbread)
+.TP
+.B \-g, \-\-graphviz
+emit the dependency trees of the specified packages in the dot syntax
+.TP
+.B \-IIGNORED\_DOT\_DEPS, \-\-ignore-deps=IGNORED_DOT_DEPS
+Stop processing at the given list of dependencies when generating dependency
+graphs. This can help to make the graph easier to read.
+.TP
+.B \-lDEBUG_DOMAINS, \-\-log-domains=DEBUG_DOMAINS
+Show debug logging for the specified logging domains
+.TP
+.B \-P, \-\-profile
+profile the command and print a report
+.TP
+.B \-uUI, \-\-ui=UI
+User interface to use. Currently, hob, depexp, goggle or ncurses can be specified as UI.
+.TP
+.B \-tSERVERTYPE, \-\-servertype=SERVERTYPE
+Choose which server to use: none, process or xmlrpc.
+.TP
+.B \-\-revisions-changed
+Set the exit code depending on whether upstream floating revisions have changed or not.
+.TP
+.B \-\-server-only
+Run bitbake without a UI; the frontend can connect to the bitbake server itself.
+.TP
+.B \-BBIND, \-\-bind=BIND
+The name/address for the bitbake server to bind to.
+.TP
+.B \-\-no\-setscene
+Do not run any setscene tasks; the real tasks are forced to run instead.
+
+.SH ENVIRONMENT VARIABLES
+bitbake uses the following environment variables to control its
+operation:
+.TP
+.B BITBAKE_UI
+The bitbake user interface; overridden by the \fB-u\fP commandline option.
+
+.SH AUTHORS
+BitBake was written by 
+Phil Blundell,
+Holger Freyther,
+Chris Larson,
+Mickey Lauer,
+Richard Purdie,
+Holger Schurig
+.PP
+This manual page was written by Marcin Juszkiewicz <marcin@hrw.one.pl>
+for the Debian project (but may be used by others).

+ 59 - 0
bitbake/doc/poky.ent

@@ -0,0 +1,59 @@
+<!ENTITY DISTRO "1.4">
+<!ENTITY DISTRO_NAME "tbd">
+<!ENTITY YOCTO_DOC_VERSION "1.4">
+<!ENTITY POKYVERSION "8.0">
+<!ENTITY YOCTO_POKY "poky-&DISTRO_NAME;-&POKYVERSION;">
+<!ENTITY COPYRIGHT_YEAR "2010-2013">
+<!ENTITY YOCTO_DL_URL "http://downloads.yoctoproject.org">
+<!ENTITY YOCTO_HOME_URL "http://www.yoctoproject.org">
+<!ENTITY YOCTO_LISTS_URL "http://lists.yoctoproject.org">
+<!ENTITY YOCTO_BUGZILLA_URL "http://bugzilla.yoctoproject.org">
+<!ENTITY YOCTO_WIKI_URL "https://wiki.yoctoproject.org">
+<!ENTITY YOCTO_AB_URL "http://autobuilder.yoctoproject.org">
+<!ENTITY YOCTO_GIT_URL "http://git.yoctoproject.org">
+<!ENTITY YOCTO_ADTREPO_URL "http://adtrepo.yoctoproject.org">
+<!ENTITY OE_HOME_URL "http://www.openembedded.org">
+<!ENTITY OE_LISTS_URL "http://lists.linuxtogo.org/cgi-bin/mailman">
+<!ENTITY OE_DOCS_URL "http://docs.openembedded.org">
+<!ENTITY OH_HOME_URL "http://o-hand.com">
+<!ENTITY BITBAKE_HOME_URL "http://developer.berlios.de/projects/bitbake/">
+<!ENTITY ECLIPSE_MAIN_URL "http://www.eclipse.org/downloads">
+<!ENTITY ECLIPSE_DL_URL "http://download.eclipse.org">
+<!ENTITY ECLIPSE_DL_PLUGIN_URL "&YOCTO_DL_URL;/releases/eclipse-plugin/&DISTRO;">
+<!ENTITY ECLIPSE_UPDATES_URL "&ECLIPSE_DL_URL;/tm/updates/3.3">
+<!ENTITY ECLIPSE_INDIGO_URL "&ECLIPSE_DL_URL;/releases/indigo">
+<!ENTITY ECLIPSE_JUNO_URL "&ECLIPSE_DL_URL;/releases/juno">
+<!ENTITY ECLIPSE_INDIGO_CDT_URL "&ECLIPSE_DL_URL;/tools/cdt/releases/indigo">
+<!ENTITY YOCTO_DOCS_URL "&YOCTO_HOME_URL;/docs">
+<!ENTITY YOCTO_SOURCES_URL "&YOCTO_HOME_URL;/sources/">
+<!ENTITY YOCTO_AB_PORT_URL "&YOCTO_AB_URL;:8010">
+<!ENTITY YOCTO_AB_NIGHTLY_URL "&YOCTO_AB_URL;/nightly/">
+<!ENTITY YOCTO_POKY_URL "&YOCTO_DL_URL;/releases/poky/">
+<!ENTITY YOCTO_RELEASE_DL_URL "&YOCTO_DL_URL;/releases/yocto/yocto-&DISTRO;">
+<!ENTITY YOCTO_TOOLCHAIN_DL_URL "&YOCTO_RELEASE_DL_URL;/toolchain/">
+<!ENTITY YOCTO_ECLIPSE_DL_URL "&YOCTO_RELEASE_DL_URL;/eclipse-plugin/indigo">
+<!ENTITY YOCTO_ADTINSTALLER_DL_URL "&YOCTO_RELEASE_DL_URL;/adt_installer">
+<!ENTITY YOCTO_POKY_DL_URL "&YOCTO_RELEASE_DL_URL;/&YOCTO_POKY;.tar.bz2">
+<!ENTITY YOCTO_MACHINES_DL_URL "&YOCTO_RELEASE_DL_URL;/machines">
+<!ENTITY YOCTO_QEMU_DL_URL "&YOCTO_MACHINES_DL_URL;/qemu">
+<!ENTITY YOCTO_PYTHON-i686_DL_URL "&YOCTO_DL_URL;/releases/miscsupport/python-nativesdk-standalone-i686.tar.bz2">
+<!ENTITY YOCTO_PYTHON-x86_64_DL_URL "&YOCTO_DL_URL;/releases/miscsupport/python-nativesdk-standalone-x86_64.tar.bz2">
+<!ENTITY YOCTO_DOCS_QS_URL "&YOCTO_DOCS_URL;/&YOCTO_DOC_VERSION;/yocto-project-qs/yocto-project-qs.html">
+<!ENTITY YOCTO_DOCS_ADT_URL "&YOCTO_DOCS_URL;/&YOCTO_DOC_VERSION;/adt-manual/adt-manual.html">
+<!ENTITY YOCTO_DOCS_REF_URL "&YOCTO_DOCS_URL;/&YOCTO_DOC_VERSION;/ref-manual/ref-manual.html">
+<!ENTITY YOCTO_DOCS_BSP_URL "&YOCTO_DOCS_URL;/&YOCTO_DOC_VERSION;/bsp-guide/bsp-guide.html">
+<!ENTITY YOCTO_DOCS_DEV_URL "&YOCTO_DOCS_URL;/&YOCTO_DOC_VERSION;/dev-manual/dev-manual.html">
+<!ENTITY YOCTO_DOCS_KERNEL_URL "&YOCTO_DOCS_URL;/&YOCTO_DOC_VERSION;/kernel-manual/kernel-manual.html">
+<!ENTITY YOCTO_ADTPATH_DIR "/opt/poky/&DISTRO;">
+<!ENTITY YOCTO_POKY_TARBALL "&YOCTO_POKY;.tar.bz2">
+<!ENTITY OE_INIT_PATH "&YOCTO_POKY;/oe-init-build-env">
+<!ENTITY OE_INIT_FILE "oe-init-build-env">
+<!ENTITY UBUNTU_HOST_PACKAGES_ESSENTIAL "gawk wget git-core diffstat unzip texinfo \
+     build-essential chrpath">
+<!ENTITY FEDORA_HOST_PACKAGES_ESSENTIAL "gawk make wget tar bzip2 gzip python unzip perl patch \
+     diffutils diffstat git cpp gcc gcc-c++ eglibc-devel texinfo chrpath \
+     ccache">
+<!ENTITY OPENSUSE_HOST_PACKAGES_ESSENTIAL "python gcc gcc-c++ git chrpath make wget python-xml \
+     diffstat texinfo python-curses">
+<!ENTITY CENTOS_HOST_PACKAGES_ESSENTIAL "gawk make wget tar bzip2 gzip python unzip perl patch \
+     diffutils diffstat git cpp gcc gcc-c++ glibc-devel texinfo chrpath">

BIN
bitbake/doc/template/Vera.ttf


File diff suppressed because it is too large
+ 0 - 0
bitbake/doc/template/Vera.xml


BIN
bitbake/doc/template/VeraMoBd.ttf


File diff suppressed because it is too large
+ 0 - 0
bitbake/doc/template/VeraMoBd.xml


BIN
bitbake/doc/template/VeraMono.ttf


File diff suppressed because it is too large
+ 0 - 0
bitbake/doc/template/VeraMono.xml


+ 39 - 0
bitbake/doc/template/component.title.xsl

@@ -0,0 +1,39 @@
+<xsl:stylesheet version="1.0"
+  xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
+  xmlns:d="http://docbook.org/ns/docbook"
+  xmlns="http://www.w3.org/1999/xhtml"
+  exclude-result-prefixes="d">
+  
+  <xsl:template name="component.title">
+    <xsl:param name="node" select="."/>
+    
+    <xsl:variable name="level">
+      <xsl:choose>
+        <xsl:when test="ancestor::d:section">
+          <xsl:value-of select="count(ancestor::d:section)+1"/>
+        </xsl:when>
+        <xsl:when test="ancestor::d:sect5">6</xsl:when>
+        <xsl:when test="ancestor::d:sect4">5</xsl:when>
+        <xsl:when test="ancestor::d:sect3">4</xsl:when>
+        <xsl:when test="ancestor::d:sect2">3</xsl:when>
+        <xsl:when test="ancestor::d:sect1">2</xsl:when>
+        <xsl:otherwise>1</xsl:otherwise>
+      </xsl:choose>
+    </xsl:variable>
+    <xsl:element name="h{$level+1}" namespace="http://www.w3.org/1999/xhtml">
+      <xsl:attribute name="class">title</xsl:attribute>
+      <xsl:if test="$generate.id.attributes = 0">
+        <xsl:call-template name="anchor">
+          <xsl:with-param name="node" select="$node"/>
+          <xsl:with-param name="conditional" select="0"/>
+        </xsl:call-template>
+      </xsl:if>
+      <xsl:apply-templates select="$node" mode="object.title.markup">
+        <xsl:with-param name="allow-anchors" select="1"/>
+      </xsl:apply-templates>
+      <xsl:call-template name="permalink">
+        <xsl:with-param name="node" select="$node"/>
+      </xsl:call-template>
+    </xsl:element>
+  </xsl:template>
+</xsl:stylesheet>

+ 64 - 0
bitbake/doc/template/db-pdf.xsl

@@ -0,0 +1,64 @@
+<?xml version='1.0'?>
+<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns="http://www.w3.org/1999/xhtml" xmlns:fo="http://www.w3.org/1999/XSL/Format" version="1.0">
+  
+  <xsl:import href="http://docbook.sourceforge.net/release/xsl/current/fo/docbook.xsl" />
+
+  <!-- Check project-plan.sh for how this file is generated; it is needed
+       to tweak the cover page.
+    -->
+  <xsl:include href="/tmp/titlepage.xsl"/> 
+
+  <!-- To force a page break in the document (e.g. per section), add a
+      <?hard-pagebreak?> processing instruction.
+  -->
+ <xsl:template match="processing-instruction('hard-pagebreak')">
+   <fo:block break-before='page' />
+ </xsl:template>
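+
+ <!-- Illustrative usage (an addition, not part of this stylesheet): in the
+      DocBook source, place the processing instruction where a break is
+      wanted, e.g.
+        </section>
+        <?hard-pagebreak?>
+        <section>
+   -->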
+
+  <!-- Fix for the default indent making the TOC layout look weird.
+      See http://sources.redhat.com/ml/docbook-apps/2005-q1/msg00455.html
+      FIXME: there must be a better fix.
+    -->
+  <xsl:param name="body.start.indent" select="'0'"/>
+  <!--<xsl:param name="title.margin.left" select="'0'"/>-->
+
+  <!-- stop long-ish header titles getting wrapped -->
+  <xsl:param name="header.column.widths">1 10 1</xsl:param>
+
+  <!-- customise headers and footers a little --> 
+
+  <xsl:template name="head.sep.rule">
+   <xsl:if test="$header.rule != 0">
+     <xsl:attribute name="border-bottom-width">0.5pt</xsl:attribute>
+     <xsl:attribute name="border-bottom-style">solid</xsl:attribute>
+     <xsl:attribute name="border-bottom-color">#cccccc</xsl:attribute>
+   </xsl:if>
+  </xsl:template>
+
+  <xsl:template name="foot.sep.rule">
+    <xsl:if test="$footer.rule != 0">
+     <xsl:attribute name="border-top-width">0.5pt</xsl:attribute>
+     <xsl:attribute name="border-top-style">solid</xsl:attribute>
+     <xsl:attribute name="border-top-color">#cccccc</xsl:attribute>
+    </xsl:if>
+  </xsl:template>
+
+  <xsl:attribute-set name="header.content.properties">
+    <xsl:attribute name="color">#cccccc</xsl:attribute>
+  </xsl:attribute-set>
+
+  <xsl:attribute-set name="footer.content.properties">
+    <xsl:attribute name="color">#cccccc</xsl:attribute>
+  </xsl:attribute-set>
+
+ 
+  <!-- general settings -->
+
+  <xsl:param name="fop1.extensions" select="1"></xsl:param>
+  <xsl:param name="paper.type" select="'A4'"></xsl:param>
+  <xsl:param name="section.autolabel" select="1"></xsl:param>
+  <xsl:param name="body.font.family" select="'verasans'"></xsl:param>
+  <xsl:param name="title.font.family" select="'verasans'"></xsl:param>
+  <xsl:param name="monospace.font.family" select="'veramono'"></xsl:param>
+
+</xsl:stylesheet>

+ 25 - 0
bitbake/doc/template/division.title.xsl

@@ -0,0 +1,25 @@
+<xsl:stylesheet version="1.0"
+  xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
+  xmlns:d="http://docbook.org/ns/docbook"
+  xmlns="http://www.w3.org/1999/xhtml"
+  exclude-result-prefixes="d">
+  
+  <xsl:template name="division.title">
+    <xsl:param name="node" select="."/>
+    
+    <h1>
+      <xsl:attribute name="class">title</xsl:attribute>
+      <xsl:call-template name="anchor">
+        <xsl:with-param name="node" select="$node"/>
+        <xsl:with-param name="conditional" select="0"/>
+      </xsl:call-template>
+      <xsl:apply-templates select="$node" mode="object.title.markup">
+        <xsl:with-param name="allow-anchors" select="1"/>
+      </xsl:apply-templates>
+      <xsl:call-template name="permalink">
+        <xsl:with-param name="node" select="$node"/>
+      </xsl:call-template>
+    </h1>
+  </xsl:template>
+</xsl:stylesheet>
+

BIN
bitbake/doc/template/draft.png


+ 58 - 0
bitbake/doc/template/fop-config.xml

@@ -0,0 +1,58 @@
+<fop version="1.0">
+
+  <!-- Strict user configuration -->
+  <strict-configuration>true</strict-configuration>
+
+  <!-- Strict FO validation -->
+  <strict-validation>true</strict-validation>
+
+   <!--
+    Set the base directory so that references such as common/openedhand.svg
+    still resolve. Note: relative file references to the current directory
+    should still work.
+    -->
+  <base>../template</base>
+  <font-base>../template</font-base>
+ 
+  <!-- Source resolution in dpi (dots/pixels per inch) for determining the
+       size of pixels in SVG and bitmap images, default: 72dpi -->
+  <!-- <source-resolution>72</source-resolution> -->
+  <!-- Target resolution in dpi (dots/pixels per inch) for specifying the
+       target resolution for generated bitmaps, default: 72dpi -->
+  <!-- <target-resolution>72</target-resolution> -->
+ 
+  <!-- default page-height and page-width, in case
+       value is specified as auto -->
+  <default-page-settings height="11in" width="8.26in"/> 
+ 
+  <!-- <use-cache>false</use-cache> -->
+ 
+  <renderers>
+    <renderer mime="application/pdf">
+      <fonts>
+        <font  metrics-file="VeraMono.xml"
+               kerning="yes" 
+               embed-url="VeraMono.ttf">
+          <font-triplet name="veramono" style="normal" weight="normal"/>
+        </font>
+
+        <font  metrics-file="VeraMoBd.xml"
+               kerning="yes" 
+               embed-url="VeraMoBd.ttf">
+          <font-triplet name="veramono" style="normal" weight="bold"/>
+        </font>
+
+        <font  metrics-file="Vera.xml"
+               kerning="yes" 
+               embed-url="Vera.ttf">
+          <font-triplet name="verasans" style="normal" weight="normal"/>
+          <font-triplet name="verasans" style="normal" weight="bold"/>
+          <font-triplet name="verasans" style="italic" weight="normal"/>
+          <font-triplet name="verasans" style="italic" weight="bold"/>
+        </font>
+        
+        <auto-detect/>
+      </fonts>
+    </renderer>
+  </renderers>
+</fop>
+

+ 21 - 0
bitbake/doc/template/formal.object.heading.xsl

@@ -0,0 +1,21 @@
+<xsl:stylesheet version="1.0"
+  xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
+  xmlns:d="http://docbook.org/ns/docbook"
+  xmlns="http://www.w3.org/1999/xhtml"
+  exclude-result-prefixes="d">
+  
+  <xsl:template name="formal.object.heading">
+    <xsl:param name="object" select="."/>
+    <xsl:param name="title">
+      <xsl:apply-templates select="$object" mode="object.title.markup">
+        <xsl:with-param name="allow-anchors" select="1"/>
+      </xsl:apply-templates>
+    </xsl:param>
+    <p class="title">
+      <b><xsl:copy-of select="$title"/></b>
+      <xsl:call-template name="permalink">
+        <xsl:with-param name="node" select="$object"/>
+      </xsl:call-template>
+    </p>
+  </xsl:template>
+</xsl:stylesheet>

+ 14 - 0
bitbake/doc/template/gloss-permalinks.xsl

@@ -0,0 +1,14 @@
+<xsl:stylesheet version="1.0"
+  xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
+  xmlns:d="http://docbook.org/ns/docbook"
+  xmlns="http://www.w3.org/1999/xhtml">
+
+  <xsl:template match="glossentry/glossterm">
+    <xsl:apply-imports/>
+    <xsl:if test="$generate.permalink != 0">
+      <xsl:call-template name="permalink">
+        <xsl:with-param name="node" select=".."/>
+      </xsl:call-template>
+    </xsl:if>
+  </xsl:template>
+</xsl:stylesheet>

+ 25 - 0
bitbake/doc/template/permalinks.xsl

@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<xsl:stylesheet version="1.0"
+  xmlns="http://www.w3.org/1999/xhtml"
+  xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+  <xsl:param name="generate.permalink" select="1"/>
+  <xsl:param name="permalink.text">¶</xsl:param>
+
+  <xsl:template name="permalink">
+    <xsl:param name="node"/>
+
+    <xsl:if test="$generate.permalink != '0'">
+      <span class="permalink">
+        <a alt="Permalink" title="Permalink">
+          <xsl:attribute name="href">
+            <xsl:call-template name="href.target">
+              <xsl:with-param name="object"  select="$node"/>
+            </xsl:call-template>
+          </xsl:attribute>
+          <xsl:copy-of select="$permalink.text"/>
+        </a>
+      </span>
+    </xsl:if>
+  </xsl:template>
+</xsl:stylesheet>

+ 55 - 0
bitbake/doc/template/section.title.xsl

@@ -0,0 +1,55 @@
+<xsl:stylesheet version="1.0"
+  xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
+  xmlns:d="http://docbook.org/ns/docbook"
+  xmlns="http://www.w3.org/1999/xhtml" exclude-result-prefixes="d">
+
+  <xsl:template name="section.title">
+    <xsl:variable name="section"
+      select="(ancestor::section |
+               ancestor::simplesect|
+               ancestor::sect1|
+               ancestor::sect2|
+               ancestor::sect3|
+               ancestor::sect4|
+               ancestor::sect5)[last()]"/>
+
+    <xsl:variable name="renderas">
+      <xsl:choose>
+        <xsl:when test="$section/@renderas = 'sect1'">1</xsl:when>
+        <xsl:when test="$section/@renderas = 'sect2'">2</xsl:when>
+        <xsl:when test="$section/@renderas = 'sect3'">3</xsl:when>
+        <xsl:when test="$section/@renderas = 'sect4'">4</xsl:when>
+        <xsl:when test="$section/@renderas = 'sect5'">5</xsl:when>
+        <xsl:otherwise><xsl:value-of select="''"/></xsl:otherwise>
+      </xsl:choose>
+    </xsl:variable>
+
+    <xsl:variable name="level">
+      <xsl:choose>
+        <xsl:when test="$renderas != ''">
+          <xsl:value-of select="$renderas"/>
+        </xsl:when>
+        <xsl:otherwise>
+          <xsl:call-template name="section.level">
+            <xsl:with-param name="node" select="$section"/>
+          </xsl:call-template>
+        </xsl:otherwise>
+      </xsl:choose>
+    </xsl:variable>
+
+    <xsl:call-template name="section.heading">
+      <xsl:with-param name="section" select="$section"/>
+      <xsl:with-param name="level" select="$level"/>
+      <xsl:with-param name="title">
+        <xsl:apply-templates select="$section" mode="object.title.markup">
+          <xsl:with-param name="allow-anchors" select="1"/>
+        </xsl:apply-templates>
+        <xsl:if test="$level &gt; 0">
+          <xsl:call-template name="permalink">
+            <xsl:with-param name="node" select="$section"/>
+          </xsl:call-template>
+        </xsl:if>
+      </xsl:with-param>
+    </xsl:call-template>
+  </xsl:template>
+</xsl:stylesheet>

+ 1259 - 0
bitbake/doc/template/titlepage.templates.xml

@@ -0,0 +1,1259 @@
+<!DOCTYPE t:templates [
+<!ENTITY hsize0 "10pt">
+<!ENTITY hsize1 "12pt">
+<!ENTITY hsize2 "14.4pt">
+<!ENTITY hsize3 "17.28pt">
+<!ENTITY hsize4 "20.736pt">
+<!ENTITY hsize5 "24.8832pt">
+<!ENTITY hsize0space "7.5pt"> <!-- 0.75 * hsize0 -->
+<!ENTITY hsize1space "9pt"> <!-- 0.75 * hsize1 -->
+<!ENTITY hsize2space "10.8pt"> <!-- 0.75 * hsize2 -->
+<!ENTITY hsize3space "12.96pt"> <!-- 0.75 * hsize3 -->
+<!ENTITY hsize4space "15.552pt"> <!-- 0.75 * hsize4 -->
+<!ENTITY hsize5space "18.6624pt"> <!-- 0.75 * hsize5 -->
+]>
+<t:templates xmlns:t="http://nwalsh.com/docbook/xsl/template/1.0"
+	     xmlns:param="http://nwalsh.com/docbook/xsl/template/1.0/param"
+             xmlns:fo="http://www.w3.org/1999/XSL/Format"
+             xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+
+<!-- ********************************************************************
+     $Id: titlepage.templates.xml,v 1.23 2003/12/16 00:30:49 bobstayton Exp $
+     ********************************************************************
+
+     This file is part of the DocBook XSL Stylesheet distribution.
+     See ../README or http://docbook.sf.net/ for copyright
+     and other information.
+
+     ******************************************************************** -->
+
+<!-- ==================================================================== -->
+
+<t:titlepage t:element="article" t:wrapper="fo:block"
+             font-family="{$title.fontset}">
+
+  <t:titlepage-content t:side="recto"
+             text-align="center">
+
+    <mediaobject/>
+
+    <title t:named-template="component.title"
+	   param:node="ancestor-or-self::article[1]"
+	   keep-with-next="always"
+	   font-size="&hsize5;"
+	   font-weight="bold"/>
+
+    <subtitle param:node="ancestor-or-self::article[1]"
+	   keep-with-next="always"
+	   font-size="&hsize3;"
+	   font-weight="bold"
+       space-after="0.8em"/>
+
+    <corpauthor space-before="0.5em"
+                font-size="&hsize3;"/>
+    <authorgroup space-before="0.5em"
+                 font-size="&hsize2;"/>
+    <author space-before="0.5em"
+            font-size="&hsize2;"
+            space-after="0.8em"/>
+
+    <email font-size="&hsize2;"/>
+
+    <othercredit space-before="0.5em"/>
+    <releaseinfo space-before="0.5em"/>
+    <copyright space-before="0.5em"/>
+    <legalnotice text-align="start"
+                 margin-left="0.5in"
+                 margin-right="0.5in"
+                 font-family="{$body.fontset}"/>
+    <pubdate space-before="0.5em"/>
+	<para></para>
+    <revision space-before="0.5em"/>
+    <revhistory space-before="0.5em"/>
+    <abstract space-before="0.5em"
+	      text-align="start"
+	      margin-left="0.5in"
+              margin-right="0.5in"
+              font-family="{$body.fontset}"/>
+
+    <para></para>
+  </t:titlepage-content>
+
+  <t:titlepage-content t:side="verso">
+  </t:titlepage-content>
+
+  <t:titlepage-separator>
+  </t:titlepage-separator>
+
+  <t:titlepage-before t:side="recto">
+  </t:titlepage-before>
+
+  <t:titlepage-before t:side="verso">
+  </t:titlepage-before>
+</t:titlepage>
+
+<!-- ==================================================================== -->
+
+<t:titlepage t:element="set" t:wrapper="fo:block">
+  <t:titlepage-content t:side="recto">
+      <title
+	     t:named-template="division.title"
+	     param:node="ancestor-or-self::set[1]"
+	     text-align="center"
+	     font-size="&hsize5;"
+	     space-before="&hsize5space;"
+	     font-weight="bold"
+	     font-family="{$title.fontset}"/>
+    <subtitle
+	      font-family="{$title.fontset}"
+	      text-align="center"/>
+    <corpauthor/>
+    <authorgroup/>
+    <author/>
+    <othercredit/>
+    <releaseinfo/>
+    <copyright/>
+    <legalnotice/>
+    <pubdate/>
+    <revision/>
+    <revhistory/>
+    <abstract/>
+  </t:titlepage-content>
+
+  <t:titlepage-content t:side="verso">
+  </t:titlepage-content>
+
+  <t:titlepage-separator>
+  </t:titlepage-separator>
+
+  <t:titlepage-before t:side="recto">
+  </t:titlepage-before>
+
+  <t:titlepage-before t:side="verso">
+  </t:titlepage-before>
+</t:titlepage>
+
+<!-- ==================================================================== -->
+
+  <t:titlepage t:element="book" t:wrapper="fo:block">
+    <t:titlepage-content t:side="recto">
+
+      <mediaobject/>
+
+<!--
+
+# If this block is left in, the text title from the
+# <title>BitBake User Manual</title> element of the
+# bitbake-user-manual.xml file is rendered on the title page below the
+# image.  Commenting it out removes it from the title page while still
+# keeping it in the tab text for the HTML version of the manual.
+
+      <title
+	     t:named-template="division.title"
+	     param:node="ancestor-or-self::book[1]"
+	     text-align="center"
+	     font-size="&hsize5;"
+	     space-before="&hsize5space;"
+	     font-weight="bold"
+	     font-family="{$title.fontset}"/>
+-->
+      <subtitle
+		text-align="center"
+		font-size="&hsize4;"
+		space-before="&hsize4space;"
+		font-family="{$title.fontset}"/>
+      <corpauthor font-size="&hsize3;"
+		  keep-with-next="always"
+		  space-before="2in"/>
+      <authorgroup space-before="2in"/>
+      <author font-size="&hsize3;"
+	      space-before="&hsize2space;"
+	      keep-with-next="always"/>
+    </t:titlepage-content>
+
+  <t:titlepage-content t:side="verso">
+<!--
+# If this block is left in, the text title from the
+# <title>BitBake User Manual</title> element of the
+# bitbake-user-manual.xml file is rendered on the title page below the
+# image.  Commenting it out removes it from the title page while still
+# keeping it in the tab text for the HTML version of the manual.
+
+      <title
+	     t:named-template="book.verso.title"
+	     font-size="&hsize2;"
+	     font-weight="bold"
+	     font-family="{$title.fontset}"/>
+-->
+      <corpauthor/>
+      <authorgroup t:named-template="verso.authorgroup"/>
+      <author/>
+      <othercredit/>
+      <pubdate space-before="1em"/>
+      <copyright/>
+      <abstract/>
+      <legalnotice font-size="8pt"/>
+  </t:titlepage-content>
+
+  <t:titlepage-separator>
+      <fo:block break-after="page"/>
+  </t:titlepage-separator>
+
+  <t:titlepage-before t:side="recto">
+  </t:titlepage-before>
+
+  <t:titlepage-before t:side="verso">
+      <fo:block break-after="page"/>
+  </t:titlepage-before>
+</t:titlepage>
+
+<!-- ==================================================================== -->
+
+<t:titlepage t:element="part" t:wrapper="fo:block">
+  <t:titlepage-content t:side="recto">
+      <title
+	     t:named-template="division.title"
+	     param:node="ancestor-or-self::part[1]"
+	     text-align="center"
+	     font-size="&hsize5;"
+	     space-before="&hsize5space;"
+	     font-weight="bold"
+	     font-family="{$title.fontset}"/>
+    <subtitle
+	      text-align="center"
+	      font-size="&hsize4;"
+	      space-before="&hsize4space;"
+	      font-weight='bold'
+	      font-style='italic'
+	      font-family="{$title.fontset}"/>
+  </t:titlepage-content>
+
+  <t:titlepage-content t:side="verso">
+  </t:titlepage-content>
+
+  <t:titlepage-separator>
+  </t:titlepage-separator>
+
+  <t:titlepage-before t:side="recto">
+  </t:titlepage-before>
+
+  <t:titlepage-before t:side="verso">
+  </t:titlepage-before>
+</t:titlepage>
+
+<t:titlepage t:element="partintro" t:wrapper="fo:block">
+  <t:titlepage-content t:side="recto">
+    <title
+	   text-align="center"
+	   font-size="&hsize5;"
+	   font-weight="bold"
+	   space-before="1em"
+	   font-family="{$title.fontset}"/>
+    <subtitle
+	      text-align="center"
+	      font-size="&hsize2;"
+	      font-weight="bold"
+	      font-style="italic"
+	      font-family="{$title.fontset}"/>
+    <corpauthor/>
+    <authorgroup/>
+    <author/>
+    <othercredit/>
+    <releaseinfo/>
+    <copyright/>
+    <legalnotice/>
+    <pubdate/>
+    <revision/>
+    <revhistory/>
+    <abstract/>
+  </t:titlepage-content>
+
+  <t:titlepage-content t:side="verso">
+  </t:titlepage-content>
+
+  <t:titlepage-separator>
+  </t:titlepage-separator>
+
+  <t:titlepage-before t:side="recto">
+  </t:titlepage-before>
+
+  <t:titlepage-before t:side="verso">
+  </t:titlepage-before>
+</t:titlepage>
+
+<!-- ==================================================================== -->
+
+<t:titlepage t:element="reference" t:wrapper="fo:block">
+  <t:titlepage-content t:side="recto">
+      <title
+	     t:named-template="division.title"
+	     param:node="ancestor-or-self::reference[1]"
+	     text-align="center"
+	     font-size="&hsize5;"
+	     space-before="&hsize5space;"
+	     font-weight="bold"
+	     font-family="{$title.fontset}"/>
+    <subtitle
+	      font-family="{$title.fontset}"
+	      text-align="center"/>
+    <corpauthor/>
+    <authorgroup/>
+    <author/>
+    <othercredit/>
+    <releaseinfo/>
+    <copyright/>
+    <legalnotice/>
+    <pubdate/>
+    <revision/>
+    <revhistory/>
+    <abstract/>
+  </t:titlepage-content>
+
+  <t:titlepage-content t:side="verso">
+  </t:titlepage-content>
+
+  <t:titlepage-separator>
+  </t:titlepage-separator>
+
+  <t:titlepage-before t:side="recto">
+  </t:titlepage-before>
+
+  <t:titlepage-before t:side="verso">
+  </t:titlepage-before>
+</t:titlepage>
+
+<!-- ==================================================================== -->
+
+<t:titlepage t:element="refsynopsisdiv" t:wrapper="fo:block">
+  <t:titlepage-content t:side="recto">
+    <title
+	   font-family="{$title.fontset}"/>
+  </t:titlepage-content>
+
+  <t:titlepage-content t:side="verso">
+  </t:titlepage-content>
+
+  <t:titlepage-separator>
+  </t:titlepage-separator>
+
+  <t:titlepage-before t:side="recto">
+  </t:titlepage-before>
+
+  <t:titlepage-before t:side="verso">
+  </t:titlepage-before>
+</t:titlepage>
+
+<!-- ==================================================================== -->
+
+<t:titlepage t:element="refsection" t:wrapper="fo:block">
+  <t:titlepage-content t:side="recto">
+    <title
+	   font-family="{$title.fontset}"/>
+  </t:titlepage-content>
+
+  <t:titlepage-content t:side="verso">
+  </t:titlepage-content>
+
+  <t:titlepage-separator>
+  </t:titlepage-separator>
+
+  <t:titlepage-before t:side="recto">
+  </t:titlepage-before>
+
+  <t:titlepage-before t:side="verso">
+  </t:titlepage-before>
+</t:titlepage>
+
+<!-- ==================================================================== -->
+
+<t:titlepage t:element="refsect1" t:wrapper="fo:block">
+  <t:titlepage-content t:side="recto">
+    <title
+	   font-family="{$title.fontset}"/>
+  </t:titlepage-content>
+
+  <t:titlepage-content t:side="verso">
+  </t:titlepage-content>
+
+  <t:titlepage-separator>
+  </t:titlepage-separator>
+
+  <t:titlepage-before t:side="recto">
+  </t:titlepage-before>
+
+  <t:titlepage-before t:side="verso">
+  </t:titlepage-before>
+</t:titlepage>
+
+<!-- ==================================================================== -->
+
+<t:titlepage t:element="refsect2" t:wrapper="fo:block">
+  <t:titlepage-content t:side="recto">
+    <title
+	   font-family="{$title.fontset}"/>
+  </t:titlepage-content>
+
+  <t:titlepage-content t:side="verso">
+  </t:titlepage-content>
+
+  <t:titlepage-separator>
+  </t:titlepage-separator>
+
+  <t:titlepage-before t:side="recto">
+  </t:titlepage-before>
+
+  <t:titlepage-before t:side="verso">
+  </t:titlepage-before>
+</t:titlepage>
+
+<!-- ==================================================================== -->
+
+<t:titlepage t:element="refsect3" t:wrapper="fo:block">
+  <t:titlepage-content t:side="recto">
+    <title
+	   font-family="{$title.fontset}"/>
+  </t:titlepage-content>
+
+  <t:titlepage-content t:side="verso">
+  </t:titlepage-content>
+
+  <t:titlepage-separator>
+  </t:titlepage-separator>
+
+  <t:titlepage-before t:side="recto">
+  </t:titlepage-before>
+
+  <t:titlepage-before t:side="verso">
+  </t:titlepage-before>
+</t:titlepage>
+
+<!-- ==================================================================== -->
+
+  <t:titlepage t:element="dedication" t:wrapper="fo:block">
+    <t:titlepage-content t:side="recto">
+      <title
+	     t:force="1"
+	     t:named-template="component.title"
+	     param:node="ancestor-or-self::dedication[1]"
+	     margin-left="{$title.margin.left}"
+	     font-size="&hsize5;"
+	     font-family="{$title.fontset}"
+	     font-weight="bold"/>
+      <subtitle
+		font-family="{$title.fontset}"/>
+    </t:titlepage-content>
+
+  <t:titlepage-content t:side="verso">
+  </t:titlepage-content>
+
+  <t:titlepage-separator>
+  </t:titlepage-separator>
+
+  <t:titlepage-before t:side="recto">
+  </t:titlepage-before>
+
+  <t:titlepage-before t:side="verso">
+  </t:titlepage-before>
+</t:titlepage>
+
+<!-- ==================================================================== -->
+
+  <t:titlepage t:element="preface" t:wrapper="fo:block">
+    <t:titlepage-content t:side="recto">
+      <title
+	     t:force="1"
+	     t:named-template="component.title"
+	     param:node="ancestor-or-self::preface[1]"
+	     margin-left="{$title.margin.left}"
+	     font-size="&hsize5;"
+	     font-family="{$title.fontset}"
+	     font-weight="bold"/>
+      <subtitle
+		font-family="{$title.fontset}"/>
+      <corpauthor/>
+      <authorgroup/>
+      <author/>
+      <othercredit/>
+      <releaseinfo/>
+      <copyright/>
+      <legalnotice/>
+      <pubdate/>
+      <revision/>
+      <revhistory/>
+      <abstract/>
+    </t:titlepage-content>
+
+  <t:titlepage-content t:side="verso">
+  </t:titlepage-content>
+
+  <t:titlepage-separator>
+  </t:titlepage-separator>
+
+  <t:titlepage-before t:side="recto">
+  </t:titlepage-before>
+
+  <t:titlepage-before t:side="verso">
+  </t:titlepage-before>
+</t:titlepage>
+
+<!-- ==================================================================== -->
+
+  <t:titlepage t:element="chapter" t:wrapper="fo:block"
+               font-family="{$title.fontset}">
+    <t:titlepage-content t:side="recto" margin-left="{$title.margin.left}">
+      <title t:named-template="component.title"
+	     param:node="ancestor-or-self::chapter[1]"
+	     font-size="&hsize5;"
+	     font-weight="bold"/>
+
+      <subtitle space-before="0.5em"
+		font-style="italic"
+		font-size="&hsize2;"
+		font-weight="bold"/>
+
+      <corpauthor  space-before="0.5em"
+	           space-after="0.5em"
+                   font-size="&hsize2;"/>
+
+      <authorgroup space-before="0.5em"
+	           space-after="0.5em"
+                   font-size="&hsize2;"/>
+
+      <author      space-before="0.5em"
+	           space-after="0.5em"
+                   font-size="&hsize2;"/>
+
+      <othercredit/>
+      <releaseinfo/>
+      <copyright/>
+      <legalnotice/>
+      <pubdate/>
+      <revision/>
+      <revhistory/>
+      <abstract/>
+    </t:titlepage-content>
+
+  <t:titlepage-content t:side="verso">
+  </t:titlepage-content>
+
+  <t:titlepage-separator>
+  </t:titlepage-separator>
+
+  <t:titlepage-before t:side="recto">
+  </t:titlepage-before>
+
+  <t:titlepage-before t:side="verso">
+  </t:titlepage-before>
+</t:titlepage>
+
+<!-- ==================================================================== -->
+
+  <t:titlepage t:element="appendix" t:wrapper="fo:block">
+    <t:titlepage-content t:side="recto">
+      <title
+	     t:named-template="component.title"
+	     param:node="ancestor-or-self::appendix[1]"
+	     margin-left="{$title.margin.left}"
+	     font-size="&hsize5;"
+	     font-weight="bold"
+	     font-family="{$title.fontset}"/>
+      <subtitle
+		font-family="{$title.fontset}"/>
+      <corpauthor/>
+      <authorgroup/>
+      <author/>
+      <othercredit/>
+      <releaseinfo/>
+      <copyright/>
+      <legalnotice/>
+      <pubdate/>
+      <revision/>
+      <revhistory/>
+      <abstract/>
+    </t:titlepage-content>
+
+  <t:titlepage-content t:side="verso">
+  </t:titlepage-content>
+
+  <t:titlepage-separator>
+  </t:titlepage-separator>
+
+  <t:titlepage-before t:side="recto">
+  </t:titlepage-before>
+
+  <t:titlepage-before t:side="verso">
+  </t:titlepage-before>
+</t:titlepage>
+
+<!-- ==================================================================== -->
+
+<t:titlepage t:element="section" t:wrapper="fo:block">
+  <t:titlepage-content t:side="recto">
+    <title
+	   margin-left="{$title.margin.left}"
+	   font-family="{$title.fontset}"/>
+    <subtitle
+	      font-family="{$title.fontset}"/>
+    <corpauthor/>
+    <authorgroup/>
+    <author/>
+    <othercredit/>
+    <releaseinfo/>
+    <copyright/>
+    <legalnotice/>
+    <pubdate/>
+    <revision/>
+    <revhistory/>
+    <abstract/>
+  </t:titlepage-content>
+
+  <t:titlepage-content t:side="verso">
+  </t:titlepage-content>
+
+  <t:titlepage-separator>
+  </t:titlepage-separator>
+
+  <t:titlepage-before t:side="recto">
+  </t:titlepage-before>
+
+  <t:titlepage-before t:side="verso">
+  </t:titlepage-before>
+</t:titlepage>
+
+<t:titlepage t:element="sect1" t:wrapper="fo:block">
+  <t:titlepage-content t:side="recto">
+    <title
+	   margin-left="{$title.margin.left}"
+	   font-family="{$title.fontset}"/>
+    <subtitle
+	      font-family="{$title.fontset}"/>
+    <corpauthor/>
+    <authorgroup/>
+    <author/>
+    <othercredit/>
+    <releaseinfo/>
+    <copyright/>
+    <legalnotice/>
+    <pubdate/>
+    <revision/>
+    <revhistory/>
+    <abstract/>
+  </t:titlepage-content>
+
+  <t:titlepage-content t:side="verso">
+  </t:titlepage-content>
+
+  <t:titlepage-separator>
+  </t:titlepage-separator>
+
+  <t:titlepage-before t:side="recto">
+  </t:titlepage-before>
+
+  <t:titlepage-before t:side="verso">
+  </t:titlepage-before>
+</t:titlepage>
+
+<t:titlepage t:element="sect2" t:wrapper="fo:block">
+  <t:titlepage-content t:side="recto">
+    <title
+	   margin-left="{$title.margin.left}"
+	   font-family="{$title.fontset}"/>
+    <subtitle
+	      font-family="{$title.fontset}"/>
+    <corpauthor/>
+    <authorgroup/>
+    <author/>
+    <othercredit/>
+    <releaseinfo/>
+    <copyright/>
+    <legalnotice/>
+    <pubdate/>
+    <revision/>
+    <revhistory/>
+    <abstract/>
+  </t:titlepage-content>
+
+  <t:titlepage-content t:side="verso">
+  </t:titlepage-content>
+
+  <t:titlepage-separator>
+  </t:titlepage-separator>
+
+  <t:titlepage-before t:side="recto">
+  </t:titlepage-before>
+
+  <t:titlepage-before t:side="verso">
+  </t:titlepage-before>
+</t:titlepage>
+
+<t:titlepage t:element="sect3" t:wrapper="fo:block">
+  <t:titlepage-content t:side="recto">
+    <title
+	   margin-left="{$title.margin.left}"
+	   font-family="{$title.fontset}"/>
+    <subtitle
+	      font-family="{$title.fontset}"/>
+    <corpauthor/>
+    <authorgroup/>
+    <author/>
+    <othercredit/>
+    <releaseinfo/>
+    <copyright/>
+    <legalnotice/>
+    <pubdate/>
+    <revision/>
+    <revhistory/>
+    <abstract/>
+  </t:titlepage-content>
+
+  <t:titlepage-content t:side="verso">
+  </t:titlepage-content>
+
+  <t:titlepage-separator>
+  </t:titlepage-separator>
+
+  <t:titlepage-before t:side="recto">
+  </t:titlepage-before>
+
+  <t:titlepage-before t:side="verso">
+  </t:titlepage-before>
+</t:titlepage>
+
+<t:titlepage t:element="sect4" t:wrapper="fo:block">
+  <t:titlepage-content t:side="recto">
+    <title
+	   margin-left="{$title.margin.left}"
+	   font-family="{$title.fontset}"/>
+    <subtitle
+	      font-family="{$title.fontset}"/>
+    <corpauthor/>
+    <authorgroup/>
+    <author/>
+    <othercredit/>
+    <releaseinfo/>
+    <copyright/>
+    <legalnotice/>
+    <pubdate/>
+    <revision/>
+    <revhistory/>
+    <abstract/>
+  </t:titlepage-content>
+
+  <t:titlepage-content t:side="verso">
+  </t:titlepage-content>
+
+  <t:titlepage-separator>
+  </t:titlepage-separator>
+
+  <t:titlepage-before t:side="recto">
+  </t:titlepage-before>
+
+  <t:titlepage-before t:side="verso">
+  </t:titlepage-before>
+</t:titlepage>
+
+<t:titlepage t:element="sect5" t:wrapper="fo:block">
+  <t:titlepage-content t:side="recto">
+    <title
+	   margin-left="{$title.margin.left}"
+	   font-family="{$title.fontset}"/>
+    <subtitle
+	      font-family="{$title.fontset}"/>
+    <corpauthor/>
+    <authorgroup/>
+    <author/>
+    <othercredit/>
+    <releaseinfo/>
+    <copyright/>
+    <legalnotice/>
+    <pubdate/>
+    <revision/>
+    <revhistory/>
+    <abstract/>
+  </t:titlepage-content>
+
+  <t:titlepage-content t:side="verso">
+  </t:titlepage-content>
+
+  <t:titlepage-separator>
+  </t:titlepage-separator>
+
+  <t:titlepage-before t:side="recto">
+  </t:titlepage-before>
+
+  <t:titlepage-before t:side="verso">
+  </t:titlepage-before>
+</t:titlepage>
+
+<t:titlepage t:element="simplesect" t:wrapper="fo:block">
+  <t:titlepage-content t:side="recto">
+    <title
+	   margin-left="{$title.margin.left}"
+	   font-family="{$title.fontset}"/>
+    <subtitle
+	      font-family="{$title.fontset}"/>
+    <corpauthor/>
+    <authorgroup/>
+    <author/>
+    <othercredit/>
+    <releaseinfo/>
+    <copyright/>
+    <legalnotice/>
+    <pubdate/>
+    <revision/>
+    <revhistory/>
+    <abstract/>
+  </t:titlepage-content>
+
+  <t:titlepage-content t:side="verso">
+  </t:titlepage-content>
+
+  <t:titlepage-separator>
+  </t:titlepage-separator>
+
+  <t:titlepage-before t:side="recto">
+  </t:titlepage-before>
+
+  <t:titlepage-before t:side="verso">
+  </t:titlepage-before>
+</t:titlepage>
+
+<!-- ==================================================================== -->
+
+  <t:titlepage t:element="bibliography" t:wrapper="fo:block">
+    <t:titlepage-content t:side="recto">
+      <title
+	     t:force="1"
+	     t:named-template="component.title"
+	     param:node="ancestor-or-self::bibliography[1]"
+	     margin-left="{$title.margin.left}"
+	     font-size="&hsize5;"
+	     font-family="{$title.fontset}"
+	     font-weight="bold"/>
+      <subtitle
+		font-family="{$title.fontset}"/>
+    </t:titlepage-content>
+
+    <t:titlepage-content t:side="verso">
+    </t:titlepage-content>
+
+    <t:titlepage-separator>
+    </t:titlepage-separator>
+
+    <t:titlepage-before t:side="recto">
+    </t:titlepage-before>
+
+    <t:titlepage-before t:side="verso">
+    </t:titlepage-before>
+  </t:titlepage>
+
+<!-- ==================================================================== -->
+
+  <t:titlepage t:element="bibliodiv" t:wrapper="fo:block">
+    <t:titlepage-content t:side="recto">
+      <title t:named-template="component.title"
+	     param:node="ancestor-or-self::bibliodiv[1]"
+	     margin-left="{$title.margin.left}"
+	     font-size="&hsize4;"
+	     font-family="{$title.fontset}"
+	     font-weight="bold"/>
+      <subtitle
+		font-family="{$title.fontset}"/>
+    </t:titlepage-content>
+
+    <t:titlepage-content t:side="verso">
+    </t:titlepage-content>
+
+    <t:titlepage-separator>
+    </t:titlepage-separator>
+
+    <t:titlepage-before t:side="recto">
+    </t:titlepage-before>
+
+    <t:titlepage-before t:side="verso">
+    </t:titlepage-before>
+  </t:titlepage>
+
+<!-- ==================================================================== -->
+
+  <t:titlepage t:element="glossary" t:wrapper="fo:block">
+    <t:titlepage-content t:side="recto">
+      <title
+	     t:force="1"
+	     t:named-template="component.title"
+	     param:node="ancestor-or-self::glossary[1]"
+	     margin-left="{$title.margin.left}"
+	     font-size="&hsize5;"
+	     font-family="{$title.fontset}"
+	     font-weight="bold"/>
+      <subtitle
+		font-family="{$title.fontset}"/>
+    </t:titlepage-content>
+
+    <t:titlepage-content t:side="verso">
+    </t:titlepage-content>
+
+    <t:titlepage-separator>
+    </t:titlepage-separator>
+
+    <t:titlepage-before t:side="recto">
+    </t:titlepage-before>
+
+    <t:titlepage-before t:side="verso">
+    </t:titlepage-before>
+  </t:titlepage>
+
+<!-- ==================================================================== -->
+
+  <t:titlepage t:element="glossdiv" t:wrapper="fo:block">
+    <t:titlepage-content t:side="recto">
+      <title t:named-template="component.title"
+	     param:node="ancestor-or-self::glossdiv[1]"
+	     margin-left="{$title.margin.left}"
+	     font-size="&hsize4;"
+	     font-family="{$title.fontset}"
+	     font-weight="bold"/>
+      <subtitle
+		font-family="{$title.fontset}"/>
+    </t:titlepage-content>
+
+    <t:titlepage-content t:side="verso">
+    </t:titlepage-content>
+
+    <t:titlepage-separator>
+    </t:titlepage-separator>
+
+    <t:titlepage-before t:side="recto">
+    </t:titlepage-before>
+
+    <t:titlepage-before t:side="verso">
+    </t:titlepage-before>
+  </t:titlepage>
+
+<!-- ==================================================================== -->
+
+  <t:titlepage t:element="index" t:wrapper="fo:block">
+    <t:titlepage-content t:side="recto">
+      <title
+	     t:force="1"
+	     t:named-template="component.title"
+	     param:node="ancestor-or-self::index[1]"
+             param:pagewide="1"
+	     margin-left="0pt"
+	     font-size="&hsize5;"
+	     font-family="{$title.fontset}"
+	     font-weight="bold"/>
+      <subtitle
+		font-family="{$title.fontset}"/>
+    </t:titlepage-content>
+
+    <t:titlepage-content t:side="verso">
+    </t:titlepage-content>
+
+    <t:titlepage-separator>
+    </t:titlepage-separator>
+
+    <t:titlepage-before t:side="recto">
+    </t:titlepage-before>
+
+    <t:titlepage-before t:side="verso">
+    </t:titlepage-before>
+  </t:titlepage>
+
+<!-- ==================================================================== -->
+
+  <!-- The indexdiv.title template is used so that manual and -->
+  <!-- automatically generated indexdiv titles get the same -->
+  <!-- formatting. -->
+
+  <t:titlepage t:element="indexdiv" t:wrapper="fo:block">
+    <t:titlepage-content t:side="recto">
+      <title t:force="1"
+	     t:named-template="indexdiv.title"
+	     param:title="title"/>
+      <subtitle
+		font-family="{$title.fontset}"/>
+    </t:titlepage-content>
+
+    <t:titlepage-content t:side="verso">
+    </t:titlepage-content>
+
+    <t:titlepage-separator>
+    </t:titlepage-separator>
+
+    <t:titlepage-before t:side="recto">
+    </t:titlepage-before>
+
+    <t:titlepage-before t:side="verso">
+    </t:titlepage-before>
+  </t:titlepage>
+
+<!-- ==================================================================== -->
+
+  <t:titlepage t:element="setindex" t:wrapper="fo:block">
+    <t:titlepage-content t:side="recto">
+      <title
+	     t:force="1"
+	     t:named-template="component.title"
+	     param:node="ancestor-or-self::setindex[1]"
+             param:pagewide="1"
+	     margin-left="0pt"
+	     font-size="&hsize5;"
+	     font-family="{$title.fontset}"
+	     font-weight="bold"/>
+      <subtitle
+		font-family="{$title.fontset}"/>
+    </t:titlepage-content>
+
+    <t:titlepage-content t:side="verso">
+    </t:titlepage-content>
+
+    <t:titlepage-separator>
+    </t:titlepage-separator>
+
+    <t:titlepage-before t:side="recto">
+    </t:titlepage-before>
+
+    <t:titlepage-before t:side="verso">
+    </t:titlepage-before>
+  </t:titlepage>
+
+<!-- ==================================================================== -->
+
+  <t:titlepage t:element="colophon" t:wrapper="fo:block">
+    <t:titlepage-content t:side="recto">
+      <title
+	     t:force="1"
+	     t:named-template="component.title"
+	     param:node="ancestor-or-self::colophon[1]"
+	     margin-left="{$title.margin.left}"
+	     font-size="&hsize5;"
+	     font-family="{$title.fontset}"
+	     font-weight="bold"/>
+      <subtitle
+		font-family="{$title.fontset}"/>
+    </t:titlepage-content>
+
+  <t:titlepage-content t:side="verso">
+  </t:titlepage-content>
+
+  <t:titlepage-separator>
+  </t:titlepage-separator>
+
+  <t:titlepage-before t:side="recto">
+  </t:titlepage-before>
+
+  <t:titlepage-before t:side="verso">
+  </t:titlepage-before>
+</t:titlepage>
+
+<!-- ==================================================================== -->
+
+  <t:titlepage t:element="table.of.contents" t:wrapper="fo:block">
+    <t:titlepage-content t:side="recto">
+      <title
+	     t:force="1"
+	     t:named-template="gentext"
+	     param:key="'TableofContents'"
+             space-before.minimum="1em"
+             space-before.optimum="1.5em"
+             space-before.maximum="2em"
+	     space-after="0.5em"
+	     margin-left="{$title.margin.left}"
+	     font-size="&hsize3;"
+	     font-weight="bold"
+	     font-family="{$title.fontset}"/>
+    </t:titlepage-content>
+
+    <t:titlepage-content t:side="verso">
+    </t:titlepage-content>
+
+    <t:titlepage-separator>
+    </t:titlepage-separator>
+
+    <t:titlepage-before t:side="recto">
+    </t:titlepage-before>
+
+    <t:titlepage-before t:side="verso">
+    </t:titlepage-before>
+  </t:titlepage>
+
+  <t:titlepage t:element="list.of.tables" t:wrapper="fo:block">
+    <t:titlepage-content t:side="recto">
+      <title
+	     t:force="1"
+	     t:named-template="gentext"
+	     param:key="'ListofTables'"
+             space-before.minimum="1em"
+             space-before.optimum="1.5em"
+             space-before.maximum="2em"
+	     space-after="0.5em"
+	     margin-left="{$title.margin.left}"
+	     font-size="&hsize3;"
+	     font-weight="bold"
+	     font-family="{$title.fontset}"/>
+    </t:titlepage-content>
+
+    <t:titlepage-content t:side="verso">
+    </t:titlepage-content>
+
+    <t:titlepage-separator>
+    </t:titlepage-separator>
+
+    <t:titlepage-before t:side="recto">
+    </t:titlepage-before>
+
+    <t:titlepage-before t:side="verso">
+    </t:titlepage-before>
+  </t:titlepage>
+
+  <t:titlepage t:element="list.of.figures" t:wrapper="fo:block">
+    <t:titlepage-content t:side="recto">
+      <title
+	     t:force="1"
+	     t:named-template="gentext"
+	     param:key="'ListofFigures'"
+             space-before.minimum="1em"
+             space-before.optimum="1.5em"
+             space-before.maximum="2em"
+	     space-after="0.5em"
+	     margin-left="{$title.margin.left}"
+	     font-size="&hsize3;"
+	     font-weight="bold"
+	     font-family="{$title.fontset}"/>
+    </t:titlepage-content>
+
+    <t:titlepage-content t:side="verso">
+    </t:titlepage-content>
+
+    <t:titlepage-separator>
+    </t:titlepage-separator>
+
+    <t:titlepage-before t:side="recto">
+    </t:titlepage-before>
+
+    <t:titlepage-before t:side="verso">
+    </t:titlepage-before>
+  </t:titlepage>
+
+  <t:titlepage t:element="list.of.examples" t:wrapper="fo:block">
+    <t:titlepage-content t:side="recto">
+      <title
+	     t:force="1"
+	     t:named-template="gentext"
+	     param:key="'ListofExamples'"
+             space-before.minimum="1em"
+             space-before.optimum="1.5em"
+             space-before.maximum="2em"
+	     space-after="0.5em"
+	     margin-left="{$title.margin.left}"
+	     font-size="&hsize3;"
+	     font-weight="bold"
+	     font-family="{$title.fontset}"/>
+    </t:titlepage-content>
+
+    <t:titlepage-content t:side="verso">
+    </t:titlepage-content>
+
+    <t:titlepage-separator>
+    </t:titlepage-separator>
+
+    <t:titlepage-before t:side="recto">
+    </t:titlepage-before>
+
+    <t:titlepage-before t:side="verso">
+    </t:titlepage-before>
+  </t:titlepage>
+
+  <t:titlepage t:element="list.of.equations" t:wrapper="fo:block">
+    <t:titlepage-content t:side="recto">
+      <title
+	     t:force="1"
+	     t:named-template="gentext"
+	     param:key="'ListofEquations'"
+             space-before.minimum="1em"
+             space-before.optimum="1.5em"
+             space-before.maximum="2em"
+	     space-after="0.5em"
+	     margin-left="{$title.margin.left}"
+	     font-size="&hsize3;"
+	     font-weight="bold"
+	     font-family="{$title.fontset}"/>
+    </t:titlepage-content>
+
+    <t:titlepage-content t:side="verso">
+    </t:titlepage-content>
+
+    <t:titlepage-separator>
+    </t:titlepage-separator>
+
+    <t:titlepage-before t:side="recto">
+    </t:titlepage-before>
+
+    <t:titlepage-before t:side="verso">
+    </t:titlepage-before>
+  </t:titlepage>
+
+  <t:titlepage t:element="list.of.procedures" t:wrapper="fo:block">
+    <t:titlepage-content t:side="recto">
+      <title
+	     t:force="1"
+	     t:named-template="gentext"
+	     param:key="'ListofProcedures'"
+             space-before.minimum="1em"
+             space-before.optimum="1.5em"
+             space-before.maximum="2em"
+	     space-after="0.5em"
+	     margin-left="{$title.margin.left}"
+	     font-size="&hsize3;"
+	     font-weight="bold"
+	     font-family="{$title.fontset}"/>
+    </t:titlepage-content>
+
+    <t:titlepage-content t:side="verso">
+    </t:titlepage-content>
+
+    <t:titlepage-separator>
+    </t:titlepage-separator>
+
+    <t:titlepage-before t:side="recto">
+    </t:titlepage-before>
+
+    <t:titlepage-before t:side="verso">
+    </t:titlepage-before>
+  </t:titlepage>
+
+  <t:titlepage t:element="list.of.unknowns" t:wrapper="fo:block">
+    <t:titlepage-content t:side="recto">
+      <title
+	     t:force="1"
+	     t:named-template="gentext"
+	     param:key="'ListofUnknown'"
+             space-before.minimum="1em"
+             space-before.optimum="1.5em"
+             space-before.maximum="2em"
+	     space-after="0.5em"
+	     margin-left="{$title.margin.left}"
+	     font-size="&hsize3;"
+	     font-weight="bold"
+	     font-family="{$title.fontset}"/>
+    </t:titlepage-content>
+
+    <t:titlepage-content t:side="verso">
+    </t:titlepage-content>
+
+    <t:titlepage-separator>
+    </t:titlepage-separator>
+
+    <t:titlepage-before t:side="recto">
+    </t:titlepage-before>
+
+    <t:titlepage-before t:side="verso">
+    </t:titlepage-before>
+  </t:titlepage>
+
+<!-- ==================================================================== -->
+
+</t:templates>

+ 51 - 0
bitbake/doc/tools/docbook-to-pdf

@@ -0,0 +1,51 @@
+#!/bin/sh
+	
+if [ -z "$1" -o -z "$2" ]; then
+   echo "usage: [-v] $0 <docbook file> <templatedir>"
+   echo
+   echo "*NOTE* you need xsltproc, fop and nwalsh docbook stylesheets" 
+   echo "       installed for this to work!"
+   echo
+   exit 0
+fi
+
+FO=`echo $1 | sed s/.xml/.fo/` || exit 1
+PDF=`echo $1 | sed s/.xml/.pdf/` || exit 1
+TEMPLATEDIR=$2
+
+##
+# This URI should be rewritten by your distribution's xml catalog to
+# match your locally installed XSL stylesheets.
+XSL_BASE_URI="http://docbook.sourceforge.net/release/xsl/current"
+
+# Creates a temporary XSL stylesheet based on titlepage.xsl
+xsltproc -o /tmp/titlepage.xsl                                           \
+	 --xinclude                                                      \
+         $XSL_BASE_URI/template/titlepage.xsl \
+         $TEMPLATEDIR/titlepage.templates.xml || exit 1
+
+# Creates the file needed for FOP
+xsltproc --xinclude                    \
+	 --stringparam hyphenate false \
+	 --stringparam formal.title.placement "figure after" \
+	 --stringparam ulink.show 1 \
+         --stringparam  body.font.master  9 \
+         --stringparam  title.font.master  11 \
+         --stringparam draft.watermark.image "$TEMPLATEDIR/draft.png" \
+         --stringparam  chapter.autolabel 1 \
+         --stringparam  appendix.autolabel A \
+         --stringparam  section.autolabel 1 \
+         --stringparam  section.label.includes.component.label 1 \
+         --output $FO               \
+         $TEMPLATEDIR/db-pdf.xsl    \
+	 $1                 || exit 1
+
+# Invokes the Java version of FOP.  Uses the additional configuration file $TEMPLATEDIR/fop-config.xml
+fop -c $TEMPLATEDIR/fop-config.xml -fo $FO -pdf $PDF       || exit 1
+
+rm -f $FO
+rm -f  /tmp/titlepage.xsl
+
+echo
+echo " #### Success! $PDF ready. ####"
+echo
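The docbook-to-pdf script above builds a PDF from a DocBook manual in three steps: it instantiates a titlepage stylesheet from titlepage.templates.xml, runs xsltproc with db-pdf.xsl to produce an XSL-FO file, and hands that to FOP together with fop-config.xml, removing the intermediate .fo file on success. A minimal sketch of driving it from Python build glue follows; the input and template paths are illustrative placeholders, not values taken from this commit.

    # Sketch only: invoke doc/tools/docbook-to-pdf from a Python wrapper.
    # The manual and template paths below are illustrative placeholders.
    import subprocess

    manual = "doc/bitbake-user-manual/bitbake-user-manual.xml"
    templatedir = "doc/template"

    # The script derives the .fo and .pdf names from the .xml name itself.
    subprocess.check_call(["doc/tools/docbook-to-pdf", manual, templatedir])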

+ 323 - 0
bitbake/lib/bb/COW.py

@@ -0,0 +1,323 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# This is a copy-on-write dictionary and set which abuses classes to try and be nice and fast.
+#
+# Copyright (C) 2006 Tim Amsell
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Please note:
+# Be careful when using mutable types (i.e. dicts and lists) - operations involving these are SLOW.
+# Assign a file to __warn__ to get warnings about slow operations.
+#
+
+from __future__ import print_function
+import copy
+import types
+ImmutableTypes = (
+    types.NoneType,
+    bool,
+    complex,
+    float,
+    int,
+    long,
+    tuple,
+    frozenset,
+    basestring
+)
+
+MUTABLE = "__mutable__"
+
+class COWMeta(type):
+    pass
+
+class COWDictMeta(COWMeta):
+    __warn__ = False
+    __hasmutable__ = False
+    __marker__ = tuple()
+
+    def __str__(cls):
+        # FIXME: I have magic numbers!
+        return "<COWDict Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) - 3)
+    __repr__ = __str__
+
+    def cow(cls):
+        class C(cls):
+            __count__ = cls.__count__ + 1
+        return C
+    copy = cow
+    __call__ = cow
+
+    def __setitem__(cls, key, value):
+        if not isinstance(value, ImmutableTypes):
+            if not isinstance(value, COWMeta):
+                cls.__hasmutable__ = True
+            key += MUTABLE
+        setattr(cls, key, value)
+
+    def __getmutable__(cls, key, readonly=False):
+        nkey = key + MUTABLE
+        try:
+            return cls.__dict__[nkey]
+        except KeyError:
+            pass
+
+        value = getattr(cls, nkey)
+        if readonly:
+            return value
+
+        if not cls.__warn__ is False and not isinstance(value, COWMeta):
+            print("Warning: Doing a copy because %s is a mutable type." % key, file=cls.__warn__)
+        try:
+            value = value.copy()
+        except AttributeError as e:
+            value = copy.copy(value)
+        setattr(cls, nkey, value)
+        return value
+
+    __getmarker__ = []
+    def __getreadonly__(cls, key, default=__getmarker__):
+        """\
+        Get a value (even if mutable) which you promise not to change.
+        """
+        return cls.__getitem__(key, default, True)
+
+    def __getitem__(cls, key, default=__getmarker__, readonly=False):
+        try:
+            try:
+                value = getattr(cls, key)
+            except AttributeError:
+                value = cls.__getmutable__(key, readonly)
+
+            # This is for values which have been deleted
+            if value is cls.__marker__:
+                raise AttributeError("key %s does not exist." % key)
+
+            return value
+        except AttributeError as e:
+            if not default is cls.__getmarker__:
+                return default
+
+            raise KeyError(str(e))
+
+    def __delitem__(cls, key):
+        cls.__setitem__(key, cls.__marker__)
+
+    def __revertitem__(cls, key):
+        if not cls.__dict__.has_key(key):
+            key += MUTABLE
+        delattr(cls, key)
+
+    def __contains__(cls, key):
+        return cls.has_key(key)
+
+    def has_key(cls, key):
+        value = cls.__getreadonly__(key, cls.__marker__)
+        if value is cls.__marker__:
+            return False
+        return True
+
+    def iter(cls, type, readonly=False):
+        for key in dir(cls):
+            if key.startswith("__"):
+                continue
+
+            if key.endswith(MUTABLE):
+                key = key[:-len(MUTABLE)]
+
+            if type == "keys":
+                yield key
+
+            try:
+                if readonly:
+                    value = cls.__getreadonly__(key)
+                else:
+                    value = cls[key]
+            except KeyError:
+                continue
+
+            if type == "values":
+                yield value
+            if type == "items":
+                yield (key, value)
+        raise StopIteration()
+
+    def iterkeys(cls):
+        return cls.iter("keys")
+    def itervalues(cls, readonly=False):
+        if not cls.__warn__ is False and cls.__hasmutable__ and readonly is False:
+            print("Warning: If you aren't going to change any of the values, call with readonly=True.", file=cls.__warn__)
+        return cls.iter("values", readonly)
+    def iteritems(cls, readonly=False):
+        if not cls.__warn__ is False and cls.__hasmutable__ and readonly is False:
+            print("Warning: If you aren't going to change any of the values, call with readonly=True.", file=cls.__warn__)
+        return cls.iter("items", readonly)
+
+class COWSetMeta(COWDictMeta):
+    def __str__(cls):
+        # FIXME: I have magic numbers!
+        return "<COWSet Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) -3)
+    __repr__ = __str__
+
+    def cow(cls):
+        class C(cls):
+            __count__ = cls.__count__ + 1
+        return C
+
+    def add(cls, value):
+        COWDictMeta.__setitem__(cls, repr(hash(value)), value)
+
+    def remove(cls, value):
+        COWDictMeta.__delitem__(cls, repr(hash(value)))
+
+    def __in__(cls, value):
+        return COWDictMeta.has_key(cls, repr(hash(value)))
+
+    def iterkeys(cls):
+        raise TypeError("sets don't have keys")
+
+    def iteritems(cls):
+        raise TypeError("sets don't have 'items'")
+
+# These are the actual classes you use!
+class COWDictBase(object):
+    __metaclass__ = COWDictMeta
+    __count__ = 0
+
+class COWSetBase(object):
+    __metaclass__ = COWSetMeta
+    __count__ = 0
+
+if __name__ == "__main__":
+    import sys
+    COWDictBase.__warn__ = sys.stderr
+    a = COWDictBase()
+    print("a", a)
+
+    a['a'] = 'a'
+    a['b'] = 'b'
+    a['dict'] = {}
+
+    b = a.copy()
+    print("b", b)
+    b['c'] = 'b'
+
+    print()
+
+    print("a", a)
+    for x in a.iteritems():
+        print(x)
+    print("--")
+    print("b", b)
+    for x in b.iteritems():
+        print(x)
+    print()
+
+    b['dict']['a'] = 'b'
+    b['a'] = 'c'
+
+    print("a", a)
+    for x in a.iteritems():
+        print(x)
+    print("--")
+    print("b", b)
+    for x in b.iteritems():
+        print(x)
+    print()
+
+    try:
+        b['dict2']
+    except KeyError as e:
+        print("Okay!")
+
+    a['set'] = COWSetBase()
+    a['set'].add("o1")
+    a['set'].add("o1")
+    a['set'].add("o2")
+
+    print("a", a)
+    for x in a['set'].itervalues():
+        print(x)
+    print("--")
+    print("b", b)
+    for x in b['set'].itervalues():
+        print(x)
+    print()
+
+    b['set'].add('o3')
+
+    print("a", a)
+    for x in a['set'].itervalues():
+        print(x)
+    print("--")
+    print("b", b)
+    for x in b['set'].itervalues():
+        print(x)
+    print()
+
+    a['set2'] = set()
+    a['set2'].add("o1")
+    a['set2'].add("o1")
+    a['set2'].add("o2")
+
+    print("a", a)
+    for x in a.iteritems():
+        print(x)
+    print("--")
+    print("b", b)
+    for x in b.iteritems(readonly=True):
+        print(x)
+    print()
+
+    del b['b']
+    try:
+        print(b['b'])
+    except KeyError:
+        print("Yay! deleted key raises error")
+
+    if b.has_key('b'):
+        print("Boo!")
+    else:
+        print("Yay - has_key with delete works!")
+
+    print("a", a)
+    for x in a.iteritems():
+        print(x)
+    print("--")
+    print("b", b)
+    for x in b.iteritems(readonly=True):
+        print(x)
+    print()
+
+    b.__revertitem__('b')
+
+    print("a", a)
+    for x in a.iteritems():
+        print(x)
+    print("--")
+    print("b", b)
+    for x in b.iteritems(readonly=True):
+        print(x)
+    print()
+
+    b.__revertitem__('dict')
+    print("a", a)
+    for x in a.iteritems():
+        print(x)
+    print("--")
+    print("b", b)
+    for x in b.iteritems(readonly=True):
+        print(x)
+    print()
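COW.py gets its copy-on-write behaviour from Python's class machinery rather than from copying data: cow()/copy() return a new subclass of the current "level", so key lookups fall back through the class hierarchy until a write shadows the key at the child level. The self-test above exercises the real classes; the standalone sketch below is not part of the commit and only illustrates the underlying idea.

    # Standalone sketch of the class-based copy-on-write idea used by
    # COWDictMeta: a "copy" is just a subclass, so reads fall through to
    # the parent level until the child shadows a key with its own attribute.
    class Level0(object):
        pass

    Level0.colour = "red"        # write a key at the base level

    class Level1(Level0):        # cheap "copy": no data is duplicated
        pass

    print(Level1.colour)         # red  - the read falls through to Level0
    Level1.colour = "blue"       # the write shadows the key at this level only
    print(Level0.colour)         # red  - the base level is untouched
    print(Level1.colour)         # blue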

+ 144 - 0
bitbake/lib/bb/__init__.py

@@ -0,0 +1,144 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# BitBake Build System Python Library
+#
+# Copyright (C) 2003  Holger Schurig
+# Copyright (C) 2003, 2004  Chris Larson
+#
+# Based on Gentoo's portage.py.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+__version__ = "1.29.0"
+
+import sys
+if sys.version_info < (2, 7, 3):
+    raise RuntimeError("Sorry, python 2.7.3 or later is required for this version of bitbake")
+
+
+class BBHandledException(Exception):
+    """
+    The big dilemma for generic bitbake code is what information to give the user
+    when an exception occurs. Any exception inheriting this base exception class
+    has already provided information to the user via some 'fired' message type such as
+    an explicitly fired event using bb.fire, or a bb.error message. If bitbake
+    encounters an exception derived from this class, no backtrace or other information
+    will be given to the user; it is assumed the earlier event provided the relevant information.
+    """
+    pass
+
+import os
+import logging
+
+
+class NullHandler(logging.Handler):
+    def emit(self, record):
+        pass
+
+Logger = logging.getLoggerClass()
+class BBLogger(Logger):
+    def __init__(self, name):
+        if name.split(".")[0] == "BitBake":
+            self.debug = self.bbdebug
+        Logger.__init__(self, name)
+
+    def bbdebug(self, level, msg, *args, **kwargs):
+        return self.log(logging.DEBUG - level + 1, msg, *args, **kwargs)
+
+    def plain(self, msg, *args, **kwargs):
+        return self.log(logging.INFO + 1, msg, *args, **kwargs)
+
+    def verbose(self, msg, *args, **kwargs):
+        return self.log(logging.INFO - 1, msg, *args, **kwargs)
+
+logging.raiseExceptions = False
+logging.setLoggerClass(BBLogger)
+
+logger = logging.getLogger("BitBake")
+logger.addHandler(NullHandler())
+logger.setLevel(logging.DEBUG - 2)
+
+mainlogger = logging.getLogger("BitBake.Main")
+
+# This has to be imported after the setLoggerClass, as the import of bb.msg
+# can result in construction of the various loggers.
+import bb.msg
+
+from bb import fetch2 as fetch
+sys.modules['bb.fetch'] = sys.modules['bb.fetch2']
+
+# Messaging convenience functions
+def plain(*args):
+    mainlogger.plain(''.join(args))
+
+def debug(lvl, *args):
+    if isinstance(lvl, basestring):
+        mainlogger.warn("Passed invalid debug level '%s' to bb.debug", lvl)
+        args = (lvl,) + args
+        lvl = 1
+    mainlogger.debug(lvl, ''.join(args))
+
+def note(*args):
+    mainlogger.info(''.join(args))
+
+def warn(*args):
+    mainlogger.warn(''.join(args))
+
+def error(*args, **kwargs):
+    mainlogger.error(''.join(args), extra=kwargs)
+
+def fatal(*args, **kwargs):
+    mainlogger.critical(''.join(args), extra=kwargs)
+    raise BBHandledException()
+
+def deprecated(func, name=None, advice=""):
+    """This is a decorator which can be used to mark functions
+    as deprecated. It will result in a warning being emitted
+    when the function is used."""
+    import warnings
+
+    if advice:
+        advice = ": %s" % advice
+    if name is None:
+        name = func.__name__
+
+    def newFunc(*args, **kwargs):
+        warnings.warn("Call to deprecated function %s%s." % (name,
+                                                             advice),
+                      category=DeprecationWarning,
+                      stacklevel=2)
+        return func(*args, **kwargs)
+    newFunc.__name__ = func.__name__
+    newFunc.__doc__ = func.__doc__
+    newFunc.__dict__.update(func.__dict__)
+    return newFunc
+
+# For compatibility
+def deprecate_import(current, modulename, fromlist, renames = None):
+    """Import objects from one module into another, wrapping them with a DeprecationWarning"""
+    import sys
+
+    module = __import__(modulename, fromlist = fromlist)
+    for position, objname in enumerate(fromlist):
+        obj = getattr(module, objname)
+        newobj = deprecated(obj, "{0}.{1}".format(current, objname),
+                            "Please use {0}.{1} instead".format(modulename, objname))
+        if renames:
+            newname = renames[position]
+        else:
+            newname = objname
+
+        setattr(sys.modules[current], newname, newobj)
+
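The BBLogger subclass above maps BitBake's message types onto small offsets around the standard logging levels, which is why the root "BitBake" logger is opened up to logging.DEBUG - 2. The sketch below is not part of the commit; it only prints the numeric levels implied by plain(), note(), verbose() and bbdebug().

    # Sketch: the numeric logging levels implied by the BBLogger methods above.
    # bb.plain() logs at INFO+1, bb.verbose() at INFO-1, and bb.debug(n)
    # at DEBUG - n + 1, so debug level 1 maps to 10, level 2 to 9, and so on.
    import logging

    for name, level in [("plain", logging.INFO + 1),
                        ("note", logging.INFO),
                        ("verbose", logging.INFO - 1),
                        ("debug(1)", logging.DEBUG),
                        ("debug(2)", logging.DEBUG - 1)]:
        print("%-10s -> %d" % (name, level))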

+ 777 - 0
bitbake/lib/bb/build.py

@@ -0,0 +1,777 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# BitBake 'Build' implementation
+#
+# Core code for function execution and task handling in the
+# BitBake build tools.
+#
+# Copyright (C) 2003, 2004  Chris Larson
+#
+# Based on Gentoo's portage.py.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import os
+import sys
+import logging
+import shlex
+import glob
+import time
+import stat
+import bb
+import bb.msg
+import bb.process
+from contextlib import nested
+from bb import event, utils
+
+bblogger = logging.getLogger('BitBake')
+logger = logging.getLogger('BitBake.Build')
+
+NULL = open(os.devnull, 'r+')
+
+__mtime_cache = {}
+
+def cached_mtime_noerror(f):
+    if f not in __mtime_cache:
+        try:
+            __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
+        except OSError:
+            return 0
+    return __mtime_cache[f]
+
+def reset_cache():
+    global __mtime_cache
+    __mtime_cache = {}
+
+# When we execute a Python function, we'd like certain things
+# in all namespaces, hence we add them to __builtins__.
+# If we do not do this and use the exec globals, they will
+# not be available to subfunctions.
+__builtins__['bb'] = bb
+__builtins__['os'] = os
+
+class FuncFailed(Exception):
+    def __init__(self, name = None, logfile = None):
+        self.logfile = logfile
+        self.name = name
+        if name:
+            self.msg = 'Function failed: %s' % name
+        else:
+            self.msg = "Function failed"
+
+    def __str__(self):
+        if self.logfile and os.path.exists(self.logfile):
+            msg = ("%s (log file is located at %s)" %
+                   (self.msg, self.logfile))
+        else:
+            msg = self.msg
+        return msg
+
+class TaskBase(event.Event):
+    """Base class for task events"""
+
+    def __init__(self, t, logfile, d):
+        self._task = t
+        self._package = d.getVar("PF", True)
+        self.taskfile = d.getVar("FILE", True)
+        self.taskname = self._task
+        self.logfile = logfile
+        self.time = time.time()
+        event.Event.__init__(self)
+        self._message = "recipe %s: task %s: %s" % (d.getVar("PF", True), t, self.getDisplayName())
+
+    def getTask(self):
+        return self._task
+
+    def setTask(self, task):
+        self._task = task
+
+    def getDisplayName(self):
+        return bb.event.getName(self)[4:]
+
+    task = property(getTask, setTask, None, "task property")
+
+class TaskStarted(TaskBase):
+    """Task execution started"""
+    def __init__(self, t, logfile, taskflags, d):
+        super(TaskStarted, self).__init__(t, logfile, d)
+        self.taskflags = taskflags
+
+class TaskSucceeded(TaskBase):
+    """Task execution completed"""
+
+class TaskFailed(TaskBase):
+    """Task execution failed"""
+
+    def __init__(self, task, logfile, metadata, errprinted = False):
+        self.errprinted = errprinted
+        super(TaskFailed, self).__init__(task, logfile, metadata)
+
+class TaskFailedSilent(TaskBase):
+    """Task execution failed (silently)"""
+    def getDisplayName(self):
+        # Don't need to tell the user it was silent
+        return "Failed"
+
+class TaskInvalid(TaskBase):
+
+    def __init__(self, task, metadata):
+        super(TaskInvalid, self).__init__(task, None, metadata)
+        self._message = "No such task '%s'" % task
+
+
+class LogTee(object):
+    def __init__(self, logger, outfile):
+        self.outfile = outfile
+        self.logger = logger
+        self.name = self.outfile.name
+
+    def write(self, string):
+        self.logger.plain(string)
+        self.outfile.write(string)
+
+    def __enter__(self):
+        self.outfile.__enter__()
+        return self
+
+    def __exit__(self, *excinfo):
+        self.outfile.__exit__(*excinfo)
+
+    def __repr__(self):
+        return '<LogTee {0}>'.format(self.name)
+    def flush(self):
+        self.outfile.flush()
+
+def exec_func(func, d, dirs = None):
+    """Execute a BB 'function'"""
+
+    body = d.getVar(func, False)
+    if not body:
+        if body is None:
+            logger.warn("Function %s doesn't exist", func)
+        return
+
+    flags = d.getVarFlags(func)
+    cleandirs = flags.get('cleandirs')
+    if cleandirs:
+        for cdir in d.expand(cleandirs).split():
+            bb.utils.remove(cdir, True)
+            bb.utils.mkdirhier(cdir)
+
+    if dirs is None:
+        dirs = flags.get('dirs')
+        if dirs:
+            dirs = d.expand(dirs).split()
+
+    if dirs:
+        for adir in dirs:
+            bb.utils.mkdirhier(adir)
+        adir = dirs[-1]
+    else:
+        adir = d.getVar('B', True)
+        bb.utils.mkdirhier(adir)
+
+    ispython = flags.get('python')
+
+    lockflag = flags.get('lockfiles')
+    if lockflag:
+        lockfiles = [f for f in d.expand(lockflag).split()]
+    else:
+        lockfiles = None
+
+    tempdir = d.getVar('T', True)
+
+    # The 'or func' fallback allows items to be executed outside of the normal
+    # task set, such as buildhistory
+    task = d.getVar('BB_RUNTASK', True) or func
+    if task == func:
+        taskfunc = task
+    else:
+        taskfunc = "%s.%s" % (task, func)
+
+    runfmt = d.getVar('BB_RUNFMT', True) or "run.{func}.{pid}"
+    runfn = runfmt.format(taskfunc=taskfunc, task=task, func=func, pid=os.getpid())
+    runfile = os.path.join(tempdir, runfn)
+    bb.utils.mkdirhier(os.path.dirname(runfile))
+
+    # Set up the courtesy link to the runfn, but only for tasks.
+    # We create the link just before the run script is created;
+    # if we created it afterwards and the run script failed, the
+    # link would never be created, as an exception would be raised first.
+    if task == func:
+        runlink = os.path.join(tempdir, 'run.{0}'.format(task))
+        if runlink:
+            bb.utils.remove(runlink)
+
+            try:
+                os.symlink(runfn, runlink)
+            except OSError:
+                pass
+
+    with bb.utils.fileslocked(lockfiles):
+        if ispython:
+            exec_func_python(func, d, runfile, cwd=adir)
+        else:
+            exec_func_shell(func, d, runfile, cwd=adir)
+
+_functionfmt = """
+{function}(d)
+"""
+logformatter = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
+def exec_func_python(func, d, runfile, cwd=None):
+    """Execute a python BB 'function'"""
+
+    code = _functionfmt.format(function=func)
+    bb.utils.mkdirhier(os.path.dirname(runfile))
+    with open(runfile, 'w') as script:
+        bb.data.emit_func_python(func, script, d)
+
+    if cwd:
+        try:
+            olddir = os.getcwd()
+        except OSError:
+            olddir = None
+        os.chdir(cwd)
+
+    bb.debug(2, "Executing python function %s" % func)
+
+    try:
+        text = "def %s(d):\n%s" % (func, d.getVar(func, False))
+        fn = d.getVarFlag(func, "filename", False)
+        lineno = int(d.getVarFlag(func, "lineno", False))
+        bb.methodpool.insert_method(func, text, fn, lineno - 1)
+
+        comp = utils.better_compile(code, func, "exec_python_func() autogenerated")
+        utils.better_exec(comp, {"d": d}, code, "exec_python_func() autogenerated")
+    except (bb.parse.SkipRecipe, bb.build.FuncFailed):
+        raise
+    except:
+        raise FuncFailed(func, None)
+    finally:
+        bb.debug(2, "Python function %s finished" % func)
+
+        if cwd and olddir:
+            try:
+                os.chdir(olddir)
+            except OSError:
+                pass
+
+def shell_trap_code():
+    return '''#!/bin/sh\n
+# Emit a useful diagnostic if something fails:
+bb_exit_handler() {
+    ret=$?
+    case $ret in
+    0)  ;;
+    *)  case $BASH_VERSION in
+        "") echo "WARNING: exit code $ret from a shell command.";;
+        *)  echo "WARNING: ${BASH_SOURCE[0]}:${BASH_LINENO[0]} exit $ret from '$BASH_COMMAND'";;
+        esac
+        exit $ret
+    esac
+}
+trap 'bb_exit_handler' 0
+set -e
+'''
+
+def exec_func_shell(func, d, runfile, cwd=None):
+    """Execute a shell function from the metadata
+
+    Note on directory behavior.  The 'dirs' varflag should contain a list
+    of the directories you need created prior to execution.  The last
+    item in the list is where we will chdir/cd to.
+    """
+
+    # Don't let the emitted shell script override PWD
+    d.delVarFlag('PWD', 'export')
+
+    with open(runfile, 'w') as script:
+        script.write(shell_trap_code())
+
+        bb.data.emit_func(func, script, d)
+
+        if bb.msg.loggerVerboseLogs:
+            script.write("set -x\n")
+        if cwd:
+            script.write("cd '%s'\n" % cwd)
+        script.write("%s\n" % func)
+        script.write('''
+# cleanup
+ret=$?
+trap '' 0
+exit $ret
+''')
+
+    os.chmod(runfile, 0775)
+
+    cmd = runfile
+    if d.getVarFlag(func, 'fakeroot', False):
+        fakerootcmd = d.getVar('FAKEROOT', True)
+        if fakerootcmd:
+            cmd = [fakerootcmd, runfile]
+
+    if bb.msg.loggerDefaultVerbose:
+        logfile = LogTee(logger, sys.stdout)
+    else:
+        logfile = sys.stdout
+
+    def readfifo(data):
+        lines = data.split('\0')
+        for line in lines:
+            splitval = line.split(' ', 1)
+            cmd = splitval[0]
+            if len(splitval) > 1:
+                value = splitval[1]
+            else:
+                value = ''
+            if cmd == 'bbplain':
+                bb.plain(value)
+            elif cmd == 'bbnote':
+                bb.note(value)
+            elif cmd == 'bbwarn':
+                bb.warn(value)
+            elif cmd == 'bberror':
+                bb.error(value)
+            elif cmd == 'bbfatal':
+                # The caller will call exit themselves, so bb.error() is
+                # what we want here rather than bb.fatal()
+                bb.error(value)
+            elif cmd == 'bbfatal_log':
+                bb.error(value, forcelog=True)
+            elif cmd == 'bbdebug':
+                splitval = value.split(' ', 1)
+                level = int(splitval[0])
+                value = splitval[1]
+                bb.debug(level, value)
+
+    tempdir = d.getVar('T', True)
+    fifopath = os.path.join(tempdir, 'fifo.%s' % os.getpid())
+    if os.path.exists(fifopath):
+        os.unlink(fifopath)
+    os.mkfifo(fifopath)
+    with open(fifopath, 'r+') as fifo:
+        try:
+            bb.debug(2, "Executing shell function %s" % func)
+
+            try:
+                with open(os.devnull, 'r+') as stdin:
+                    bb.process.run(cmd, shell=False, stdin=stdin, log=logfile, extrafiles=[(fifo,readfifo)])
+            except bb.process.CmdError:
+                logfn = d.getVar('BB_LOGFILE', True)
+                raise FuncFailed(func, logfn)
+        finally:
+            os.unlink(fifopath)
+
+    bb.debug(2, "Shell function %s finished" % func)
+
+def _task_data(fn, task, d):
+    localdata = bb.data.createCopy(d)
+    localdata.setVar('BB_FILENAME', fn)
+    localdata.setVar('BB_CURRENTTASK', task[3:])
+    localdata.setVar('OVERRIDES', 'task-%s:%s' %
+                     (task[3:].replace('_', '-'), d.getVar('OVERRIDES', False)))
+    localdata.finalize()
+    bb.data.expandKeys(localdata)
+    return localdata
+
+def _exec_task(fn, task, d, quieterr):
+    """Execute a BB 'task'
+
+    Execution of a task involves a bit more setup than executing a function,
+    running it with its own local metadata, and with some useful variables set.
+    """
+    if not d.getVarFlag(task, 'task', False):
+        event.fire(TaskInvalid(task, d), d)
+        logger.error("No such task: %s" % task)
+        return 1
+
+    logger.debug(1, "Executing task %s", task)
+
+    localdata = _task_data(fn, task, d)
+    tempdir = localdata.getVar('T', True)
+    if not tempdir:
+        bb.fatal("T variable not set, unable to build")
+
+    # Change nice level if we're asked to
+    nice = localdata.getVar("BB_TASK_NICE_LEVEL", True)
+    if nice:
+        curnice = os.nice(0)
+        nice = int(nice) - curnice
+        newnice = os.nice(nice)
+        logger.debug(1, "Renice to %s " % newnice)
+    ionice = localdata.getVar("BB_TASK_IONICE_LEVEL", True)
+    if ionice:
+        try:
+            cls, prio = ionice.split(".", 1)
+            bb.utils.ioprio_set(os.getpid(), int(cls), int(prio))
+        except:
+            bb.warn("Invalid ionice level %s" % ionice)
+
+    bb.utils.mkdirhier(tempdir)
+
+    # Determine the logfile to generate
+    logfmt = localdata.getVar('BB_LOGFMT', True) or 'log.{task}.{pid}'
+    logbase = logfmt.format(task=task, pid=os.getpid())
+
+    # Document the order of the tasks...
+    logorder = os.path.join(tempdir, 'log.task_order')
+    try:
+        with open(logorder, 'a') as logorderfile:
+            logorderfile.write('{0} ({1}): {2}\n'.format(task, os.getpid(), logbase))
+    except OSError:
+        logger.exception("Opening log file '%s'", logorder)
+        pass
+
+    # Setup the courtesy link to the logfn
+    loglink = os.path.join(tempdir, 'log.{0}'.format(task))
+    logfn = os.path.join(tempdir, logbase)
+    if loglink:
+        bb.utils.remove(loglink)
+
+        try:
+           os.symlink(logbase, loglink)
+        except OSError:
+           pass
+
+    prefuncs = localdata.getVarFlag(task, 'prefuncs', expand=True)
+    postfuncs = localdata.getVarFlag(task, 'postfuncs', expand=True)
+
+    class ErrorCheckHandler(logging.Handler):
+        def __init__(self):
+            self.triggered = False
+            logging.Handler.__init__(self, logging.ERROR)
+        def emit(self, record):
+            if getattr(record, 'forcelog', False):
+                self.triggered = False
+            else:
+                self.triggered = True
+
+    # Handle logfiles
+    si = open('/dev/null', 'r')
+    try:
+        bb.utils.mkdirhier(os.path.dirname(logfn))
+        logfile = open(logfn, 'w')
+    except OSError:
+        logger.exception("Opening log file '%s'", logfn)
+        pass
+
+    # Dup the existing fds so we don't lose them
+    osi = [os.dup(sys.stdin.fileno()), sys.stdin.fileno()]
+    oso = [os.dup(sys.stdout.fileno()), sys.stdout.fileno()]
+    ose = [os.dup(sys.stderr.fileno()), sys.stderr.fileno()]
+
+    # Replace those fds with our own
+    os.dup2(si.fileno(), osi[1])
+    os.dup2(logfile.fileno(), oso[1])
+    os.dup2(logfile.fileno(), ose[1])
+
+    # Ensure Python logging goes to the logfile
+    handler = logging.StreamHandler(logfile)
+    handler.setFormatter(logformatter)
+    # Always enable full debug output into task logfiles
+    handler.setLevel(logging.DEBUG - 2)
+    bblogger.addHandler(handler)
+
+    errchk = ErrorCheckHandler()
+    bblogger.addHandler(errchk)
+
+    localdata.setVar('BB_LOGFILE', logfn)
+    localdata.setVar('BB_RUNTASK', task)
+
+    flags = localdata.getVarFlags(task)
+
+    event.fire(TaskStarted(task, logfn, flags, localdata), localdata)
+    try:
+        for func in (prefuncs or '').split():
+            exec_func(func, localdata)
+        exec_func(task, localdata)
+        for func in (postfuncs or '').split():
+            exec_func(func, localdata)
+    except FuncFailed as exc:
+        if quieterr:
+            event.fire(TaskFailedSilent(task, logfn, localdata), localdata)
+        else:
+            errprinted = errchk.triggered
+            logger.error(str(exc))
+            event.fire(TaskFailed(task, logfn, localdata, errprinted), localdata)
+        return 1
+    finally:
+        sys.stdout.flush()
+        sys.stderr.flush()
+
+        bblogger.removeHandler(handler)
+
+        # Restore the backup fds
+        os.dup2(osi[0], osi[1])
+        os.dup2(oso[0], oso[1])
+        os.dup2(ose[0], ose[1])
+
+        # Close the backup fds
+        os.close(osi[0])
+        os.close(oso[0])
+        os.close(ose[0])
+        si.close()
+
+        logfile.close()
+        if os.path.exists(logfn) and os.path.getsize(logfn) == 0:
+            logger.debug(2, "Zero size logfn %s, removing", logfn)
+            bb.utils.remove(logfn)
+            bb.utils.remove(loglink)
+    event.fire(TaskSucceeded(task, logfn, localdata), localdata)
+
+    if not localdata.getVarFlag(task, 'nostamp', False) and not localdata.getVarFlag(task, 'selfstamp', False):
+        make_stamp(task, localdata)
+
+    return 0
+
+def exec_task(fn, task, d, profile = False):
+    try:
+        quieterr = False
+        if d.getVarFlag(task, "quieterrors", False) is not None:
+            quieterr = True
+
+        if profile:
+            profname = "profile-%s.log" % (d.getVar("PN", True) + "-" + task)
+            try:
+                import cProfile as profile
+            except:
+                import profile
+            prof = profile.Profile()
+            ret = profile.Profile.runcall(prof, _exec_task, fn, task, d, quieterr)
+            prof.dump_stats(profname)
+            bb.utils.process_profilelog(profname)
+
+            return ret
+        else:
+            return _exec_task(fn, task, d, quieterr)
+
+    except Exception:
+        from traceback import format_exc
+        if not quieterr:
+            logger.error("Build of %s failed" % (task))
+            logger.error(format_exc())
+            failedevent = TaskFailed(task, None, d, True)
+            event.fire(failedevent, d)
+        return 1
+
+def stamp_internal(taskname, d, file_name, baseonly=False):
+    """
+    Internal stamp helper function
+    Makes sure the stamp directory exists
+    Returns the stamp path+filename
+
+    In the bitbake core, d can be a CacheData and file_name will be set.
+    When called in task context, d will be a data store and file_name will not be set.
+    """
+    taskflagname = taskname
+    if taskname.endswith("_setscene") and taskname != "do_setscene":
+        taskflagname = taskname.replace("_setscene", "")
+
+    if file_name:
+        stamp = d.stamp[file_name]
+        extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
+    else:
+        stamp = d.getVar('STAMP', True)
+        file_name = d.getVar('BB_FILENAME', True)
+        extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info', True) or ""
+
+    if baseonly:
+        return stamp
+
+    if not stamp:
+        return
+
+    stamp = bb.parse.siggen.stampfile(stamp, file_name, taskname, extrainfo)
+
+    stampdir = os.path.dirname(stamp)
+    if cached_mtime_noerror(stampdir) == 0:
+        bb.utils.mkdirhier(stampdir)
+
+    return stamp
+
+def stamp_cleanmask_internal(taskname, d, file_name):
+    """
+    Internal stamp helper function to generate stamp cleaning mask
+    Returns the stamp path+filename
+
+    In the bitbake core, d can be a CacheData and file_name will be set.
+    When called in task context, d will be a data store and file_name will not be set.
+    """
+    taskflagname = taskname
+    if taskname.endswith("_setscene") and taskname != "do_setscene":
+        taskflagname = taskname.replace("_setscene", "")
+
+    if file_name:
+        stamp = d.stampclean[file_name]
+        extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
+    else:
+        stamp = d.getVar('STAMPCLEAN', True)
+        file_name = d.getVar('BB_FILENAME', True)
+        extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info', True) or ""
+
+    if not stamp:
+        return []
+
+    cleanmask = bb.parse.siggen.stampcleanmask(stamp, file_name, taskname, extrainfo)
+
+    return [cleanmask, cleanmask.replace(taskflagname, taskflagname + "_setscene")]
+
+def make_stamp(task, d, file_name = None):
+    """
+    Creates/updates a stamp for a given task
+    (d can be a data dict or dataCache)
+    """
+    cleanmask = stamp_cleanmask_internal(task, d, file_name)
+    for mask in cleanmask:
+        for name in glob.glob(mask):
+            # Preserve sigdata files in the stamps directory
+            if "sigdata" in name:
+                continue
+            # Preserve taint files in the stamps directory
+            if name.endswith('.taint'):
+                continue
+            os.unlink(name)
+
+    stamp = stamp_internal(task, d, file_name)
+    # Remove the file and recreate to force timestamp
+    # change on broken NFS filesystems
+    if stamp:
+        bb.utils.remove(stamp)
+        open(stamp, "w").close()
+
+    # If we're in task context, write out a signature file for each task
+    # as it completes
+    if not task.endswith("_setscene") and task != "do_setscene" and not file_name:
+        stampbase = stamp_internal(task, d, None, True)
+        file_name = d.getVar('BB_FILENAME', True)
+        bb.parse.siggen.dump_sigtask(file_name, task, stampbase, True)
+
+def del_stamp(task, d, file_name = None):
+    """
+    Removes a stamp for a given task
+    (d can be a data dict or dataCache)
+    """
+    stamp = stamp_internal(task, d, file_name)
+    bb.utils.remove(stamp)
+
+def write_taint(task, d, file_name = None):
+    """
+    Creates a "taint" file which will force the specified task and its
+    dependents to be re-run the next time by influencing the value of its
+    taskhash.
+    (d can be a data dict or dataCache)
+    """
+    import uuid
+    if file_name:
+        taintfn = d.stamp[file_name] + '.' + task + '.taint'
+    else:
+        taintfn = d.getVar('STAMP', True) + '.' + task + '.taint'
+    bb.utils.mkdirhier(os.path.dirname(taintfn))
+    # The specific content of the taint file is not really important,
+    # we just need it to be random, so a random UUID is used
+    with open(taintfn, 'w') as taintf:
+        taintf.write(str(uuid.uuid4()))
+
+def stampfile(taskname, d, file_name = None):
+    """
+    Return the stamp for a given task
+    (d can be a data dict or dataCache)
+    """
+    return stamp_internal(taskname, d, file_name)
+
+def add_tasks(tasklist, d):
+    task_deps = d.getVar('_task_deps', False)
+    if not task_deps:
+        task_deps = {}
+    if not 'tasks' in task_deps:
+        task_deps['tasks'] = []
+    if not 'parents' in task_deps:
+        task_deps['parents'] = {}
+
+    for task in tasklist:
+        task = d.expand(task)
+
+        d.setVarFlag(task, 'task', 1)
+
+        if not task in task_deps['tasks']:
+            task_deps['tasks'].append(task)
+
+        flags = d.getVarFlags(task)
+        def getTask(name):
+            if not name in task_deps:
+                task_deps[name] = {}
+            if name in flags:
+                deptask = d.expand(flags[name])
+                task_deps[name][task] = deptask
+        getTask('depends')
+        getTask('rdepends')
+        getTask('deptask')
+        getTask('rdeptask')
+        getTask('recrdeptask')
+        getTask('recideptask')
+        getTask('nostamp')
+        getTask('fakeroot')
+        getTask('noexec')
+        getTask('umask')
+        task_deps['parents'][task] = []
+        if 'deps' in flags:
+            for dep in flags['deps']:
+                dep = d.expand(dep)
+                task_deps['parents'][task].append(dep)
+
+    # don't assume holding a reference
+    d.setVar('_task_deps', task_deps)
+
+def addtask(task, before, after, d):
+    if task[:3] != "do_":
+        task = "do_" + task
+
+    d.setVarFlag(task, "task", 1)
+    bbtasks = d.getVar('__BBTASKS', False) or []
+    if task not in bbtasks:
+        bbtasks.append(task)
+    d.setVar('__BBTASKS', bbtasks)
+
+    existing = d.getVarFlag(task, "deps", False) or []
+    if after is not None:
+        # set up deps for function
+        for entry in after.split():
+            if entry not in existing:
+                existing.append(entry)
+    d.setVarFlag(task, "deps", existing)
+    if before is not None:
+        # set up things that depend on this func
+        for entry in before.split():
+            existing = d.getVarFlag(entry, "deps", False) or []
+            if task not in existing:
+                d.setVarFlag(entry, "deps", [task] + existing)
+
+def deltask(task, d):
+    if task[:3] != "do_":
+        task = "do_" + task
+
+    bbtasks = d.getVar('__BBTASKS', False) or []
+    if task in bbtasks:
+        bbtasks.remove(task)
+        d.setVar('__BBTASKS', bbtasks)
+
+    d.delVarFlag(task, 'deps')
+    for bbtask in d.getVar('__BBTASKS', False) or []:
+        deps = d.getVarFlag(bbtask, 'deps', False) or []
+        if task in deps:
+            deps.remove(task)
+            d.setVarFlag(bbtask, 'deps', deps)
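exec_func() and _exec_task() above name the generated run scripts and log files through the overridable BB_RUNFMT and BB_LOGFMT format strings, then place run.<task> and log.<task> courtesy symlinks next to them. The sketch below is not part of the commit and only evaluates the default format strings; the task name is an illustrative placeholder.

    # Sketch: how the default BB_RUNFMT / BB_LOGFMT format strings expand.
    import os

    runfmt = "run.{func}.{pid}"      # default used by exec_func()
    logfmt = "log.{task}.{pid}"      # default used by _exec_task()

    task = func = "do_compile"       # illustrative task name
    taskfunc = task if task == func else "%s.%s" % (task, func)

    print(runfmt.format(taskfunc=taskfunc, task=task, func=func, pid=os.getpid()))
    # e.g. run.do_compile.12345, next to the courtesy symlink run.do_compile
    print(logfmt.format(task=task, pid=os.getpid()))
    # e.g. log.do_compile.12345, next to the courtesy symlink log.do_compile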

+ 849 - 0
bitbake/lib/bb/cache.py

@@ -0,0 +1,849 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# BitBake Cache implementation
+#
+# Caching of bitbake variables before task execution
+
+# Copyright (C) 2006        Richard Purdie
+# Copyright (C) 2012        Intel Corporation
+
+# but small sections based on code from bin/bitbake:
+# Copyright (C) 2003, 2004  Chris Larson
+# Copyright (C) 2003, 2004  Phil Blundell
+# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
+# Copyright (C) 2005        Holger Hans Peter Freyther
+# Copyright (C) 2005        ROAD GmbH
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+
+import os
+import logging
+from collections import defaultdict
+import bb.utils
+
+logger = logging.getLogger("BitBake.Cache")
+
+try:
+    import cPickle as pickle
+except ImportError:
+    import pickle
+    logger.info("Importing cPickle failed. "
+                "Falling back to a very slow implementation.")
+
+__cache_version__ = "149"
+
+def getCacheFile(path, filename, data_hash):
+    return os.path.join(path, filename + "." + data_hash)
+
+# RecipeInfoCommon defines common methods for retrieving data
+# from metadata for caches. CoreRecipeInfo, as well as any other
+# extra RecipeInfo class, needs to inherit from this class.
+class RecipeInfoCommon(object):
+
+    @classmethod
+    def listvar(cls, var, metadata):
+        return cls.getvar(var, metadata).split()
+
+    @classmethod
+    def intvar(cls, var, metadata):
+        return int(cls.getvar(var, metadata) or 0)
+
+    @classmethod
+    def depvar(cls, var, metadata):
+        return bb.utils.explode_deps(cls.getvar(var, metadata))
+
+    @classmethod
+    def pkgvar(cls, var, packages, metadata):
+        return dict((pkg, cls.depvar("%s_%s" % (var, pkg), metadata))
+                    for pkg in packages)
+
+    @classmethod
+    def taskvar(cls, var, tasks, metadata):
+        return dict((task, cls.getvar("%s_task-%s" % (var, task), metadata))
+                    for task in tasks)
+
+    @classmethod
+    def flaglist(cls, flag, varlist, metadata, squash=False):
+        out_dict = dict((var, metadata.getVarFlag(var, flag, True))
+                    for var in varlist)
+        if squash:
+            return dict((k,v) for (k,v) in out_dict.iteritems() if v)
+        else:
+            return out_dict
+
+    @classmethod
+    def getvar(cls, var, metadata, expand = True):
+        return metadata.getVar(var, expand) or ''
+
+
+class CoreRecipeInfo(RecipeInfoCommon):
+    __slots__ = ()
+
+    cachefile = "bb_cache.dat"   
+
+    def __init__(self, filename, metadata):      
+        self.file_depends = metadata.getVar('__depends', False)
+        self.timestamp = bb.parse.cached_mtime(filename)
+        self.variants = self.listvar('__VARIANTS', metadata) + ['']
+        self.appends = self.listvar('__BBAPPEND', metadata)
+        self.nocache = self.getvar('__BB_DONT_CACHE', metadata)
+
+        self.skipreason = self.getvar('__SKIPPED', metadata)
+        if self.skipreason:
+            self.pn = self.getvar('PN', metadata) or bb.parse.BBHandler.vars_from_file(filename,metadata)[0]
+            self.skipped = True
+            self.provides  = self.depvar('PROVIDES', metadata)
+            self.rprovides = self.depvar('RPROVIDES', metadata)
+            return
+
+        self.tasks = metadata.getVar('__BBTASKS', False)
+
+        self.pn = self.getvar('PN', metadata)
+        self.packages = self.listvar('PACKAGES', metadata)
+        if not self.pn in self.packages:
+            self.packages.append(self.pn)
+
+        self.basetaskhashes = self.taskvar('BB_BASEHASH', self.tasks, metadata)
+        self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)
+
+        self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}
+
+        self.skipped = False
+        self.pe = self.getvar('PE', metadata)
+        self.pv = self.getvar('PV', metadata)
+        self.pr = self.getvar('PR', metadata)
+        self.defaultpref = self.intvar('DEFAULT_PREFERENCE', metadata)
+        self.not_world = self.getvar('EXCLUDE_FROM_WORLD', metadata)
+        self.stamp = self.getvar('STAMP', metadata)
+        self.stampclean = self.getvar('STAMPCLEAN', metadata)        
+        self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
+        self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
+        self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
+        self.depends          = self.depvar('DEPENDS', metadata)
+        self.provides         = self.depvar('PROVIDES', metadata)
+        self.rdepends         = self.depvar('RDEPENDS', metadata)
+        self.rprovides        = self.depvar('RPROVIDES', metadata)
+        self.rrecommends      = self.depvar('RRECOMMENDS', metadata)
+        self.rprovides_pkg    = self.pkgvar('RPROVIDES', self.packages, metadata)
+        self.rdepends_pkg     = self.pkgvar('RDEPENDS', self.packages, metadata)
+        self.rrecommends_pkg  = self.pkgvar('RRECOMMENDS', self.packages, metadata)
+        self.inherits         = self.getvar('__inherit_cache', metadata, expand=False)
+        self.fakerootenv      = self.getvar('FAKEROOTENV', metadata)
+        self.fakerootdirs     = self.getvar('FAKEROOTDIRS', metadata)
+        self.fakerootnoenv    = self.getvar('FAKEROOTNOENV', metadata)
+        self.extradepsfunc    = self.getvar('calculate_extra_depends', metadata)
+
+    @classmethod
+    def init_cacheData(cls, cachedata):
+        # CacheData in Core RecipeInfo Class
+        cachedata.task_deps = {}
+        cachedata.pkg_fn = {}
+        cachedata.pkg_pn = defaultdict(list)
+        cachedata.pkg_pepvpr = {}
+        cachedata.pkg_dp = {}
+
+        cachedata.stamp = {}
+        cachedata.stampclean = {}
+        cachedata.stamp_extrainfo = {}
+        cachedata.file_checksums = {}
+        cachedata.fn_provides = {}
+        cachedata.pn_provides = defaultdict(list)
+        cachedata.all_depends = []
+
+        cachedata.deps = defaultdict(list)
+        cachedata.packages = defaultdict(list)
+        cachedata.providers = defaultdict(list)
+        cachedata.rproviders = defaultdict(list)
+        cachedata.packages_dynamic = defaultdict(list)
+
+        cachedata.rundeps = defaultdict(lambda: defaultdict(list))
+        cachedata.runrecs = defaultdict(lambda: defaultdict(list))
+        cachedata.possible_world = []
+        cachedata.universe_target = []
+        cachedata.hashfn = {}
+
+        cachedata.basetaskhash = {}
+        cachedata.inherits = {}
+        cachedata.fakerootenv = {}
+        cachedata.fakerootnoenv = {}
+        cachedata.fakerootdirs = {}
+        cachedata.extradepsfunc = {}
+
+    def add_cacheData(self, cachedata, fn):
+        cachedata.task_deps[fn] = self.task_deps
+        cachedata.pkg_fn[fn] = self.pn
+        cachedata.pkg_pn[self.pn].append(fn)
+        cachedata.pkg_pepvpr[fn] = (self.pe, self.pv, self.pr)
+        cachedata.pkg_dp[fn] = self.defaultpref
+        cachedata.stamp[fn] = self.stamp
+        cachedata.stampclean[fn] = self.stampclean
+        cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
+        cachedata.file_checksums[fn] = self.file_checksums
+
+        provides = [self.pn]
+        for provide in self.provides:
+            if provide not in provides:
+                provides.append(provide)
+        cachedata.fn_provides[fn] = provides
+
+        for provide in provides:
+            cachedata.providers[provide].append(fn)
+            if provide not in cachedata.pn_provides[self.pn]:
+                cachedata.pn_provides[self.pn].append(provide)
+
+        for dep in self.depends:
+            if dep not in cachedata.deps[fn]:
+                cachedata.deps[fn].append(dep)
+            if dep not in cachedata.all_depends:
+                cachedata.all_depends.append(dep)
+
+        rprovides = self.rprovides
+        for package in self.packages:
+            cachedata.packages[package].append(fn)
+            rprovides += self.rprovides_pkg[package]
+
+        for rprovide in rprovides:
+            if fn not in cachedata.rproviders[rprovide]:
+                cachedata.rproviders[rprovide].append(fn)
+
+        for package in self.packages_dynamic:
+            cachedata.packages_dynamic[package].append(fn)
+
+        # Build hash of runtime depends and recommends
+        for package in self.packages + [self.pn]:
+            cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
+            cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]
+
+        # Collect files we may need for possible world-dep
+        # calculations
+        if self.not_world:
+            logger.debug(1, "EXCLUDE FROM WORLD: %s", fn)
+        else:
+            cachedata.possible_world.append(fn)
+
+        # create a collection of all targets for sanity checking
+        # tasks, such as upstream versions, license, and tools for
+        # task and image creation.
+        cachedata.universe_target.append(self.pn)
+
+        cachedata.hashfn[fn] = self.hashfilename
+        for task, taskhash in self.basetaskhashes.iteritems():
+            identifier = '%s.%s' % (fn, task)
+            cachedata.basetaskhash[identifier] = taskhash
+
+        cachedata.inherits[fn] = self.inherits
+        cachedata.fakerootenv[fn] = self.fakerootenv
+        cachedata.fakerootnoenv[fn] = self.fakerootnoenv
+        cachedata.fakerootdirs[fn] = self.fakerootdirs
+        cachedata.extradepsfunc[fn] = self.extradepsfunc
+
+
+
+class Cache(object):
+    """
+    BitBake Cache implementation
+    """
+
+    def __init__(self, data, data_hash, caches_array):
+        # Pass caches_array information into the Cache constructor.
+        # It will be used later to decide whether we
+        # need extra cache file dump/load support.
+        self.caches_array = caches_array
+        self.cachedir = data.getVar("CACHE", True)
+        self.clean = set()
+        self.checked = set()
+        self.depends_cache = {}
+        self.data = None
+        self.data_fn = None
+        self.cacheclean = True
+        self.data_hash = data_hash
+
+        if self.cachedir in [None, '']:
+            self.has_cache = False
+            logger.info("Not using a cache. "
+                        "Set CACHE = <directory> to enable.")
+            return
+
+        self.has_cache = True
+        self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat", self.data_hash)
+
+        logger.debug(1, "Using cache in '%s'", self.cachedir)
+        bb.utils.mkdirhier(self.cachedir)
+
+        cache_ok = True
+        if self.caches_array:
+            for cache_class in self.caches_array:
+                if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
+                    cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
+                    cache_ok = cache_ok and os.path.exists(cachefile)
+                    cache_class.init_cacheData(self)
+        if cache_ok:
+            self.load_cachefile()
+        elif os.path.isfile(self.cachefile):
+            logger.info("Out of date cache found, rebuilding...")
+
+    def load_cachefile(self):
+        # First, use the core cache file information for
+        # validity checking
+        with open(self.cachefile, "rb") as cachefile:
+            pickled = pickle.Unpickler(cachefile)
+            try:
+                cache_ver = pickled.load()
+                bitbake_ver = pickled.load()
+            except Exception:
+                logger.info('Invalid cache, rebuilding...')
+                return
+
+            if cache_ver != __cache_version__:
+                logger.info('Cache version mismatch, rebuilding...')
+                return
+            elif bitbake_ver != bb.__version__:
+                logger.info('Bitbake version mismatch, rebuilding...')
+                return
+
+
+        cachesize = 0
+        previous_progress = 0
+        previous_percent = 0
+
+        # Calculate the correct cachesize of all those cache files
+        for cache_class in self.caches_array:
+            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
+                cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
+                with open(cachefile, "rb") as cachefile:
+                    cachesize += os.fstat(cachefile.fileno()).st_size
+
+        bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data)
+        
+        for cache_class in self.caches_array:
+            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
+                cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
+                with open(cachefile, "rb") as cachefile:
+                    pickled = pickle.Unpickler(cachefile)                    
+                    while cachefile:
+                        try:
+                            key = pickled.load()
+                            value = pickled.load()
+                        except Exception:
+                            break
+                        if self.depends_cache.has_key(key):
+                            self.depends_cache[key].append(value)
+                        else:
+                            self.depends_cache[key] = [value]
+                        # only fire events on even percentage boundaries
+                        current_progress = cachefile.tell() + previous_progress
+                        current_percent = 100 * current_progress / cachesize
+                        if current_percent > previous_percent:
+                            previous_percent = current_percent
+                            bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
+                                          self.data)
+
+                    previous_progress += current_progress
+
+        # Note: the depends cache count corresponds to the number of parsed files.
+        # The same file may have several caches but is still regarded as one item in the cache.
+        bb.event.fire(bb.event.CacheLoadCompleted(cachesize,
+                                                  len(self.depends_cache)),
+                      self.data)
+
+    
+    @staticmethod
+    def virtualfn2realfn(virtualfn):
+        """
+        Convert a virtual file name to a real one + the associated subclass keyword
+        """
+
+        fn = virtualfn
+        cls = ""
+        if virtualfn.startswith('virtual:'):
+            elems = virtualfn.split(':')
+            cls = ":".join(elems[1:-1])
+            fn = elems[-1]
+        return (fn, cls)
+
+    @staticmethod
+    def realfn2virtual(realfn, cls):
+        """
+        Convert a real filename + the associated subclass keyword to a virtual filename
+        """
+        if cls == "":
+            return realfn
+        return "virtual:" + cls + ":" + realfn
+
+    @classmethod
+    def loadDataFull(cls, virtualfn, appends, cfgData):
+        """
+        Return a complete set of data for fn.
+        To do this, we need to parse the file.
+        """
+
+        (fn, virtual) = cls.virtualfn2realfn(virtualfn)
+
+        logger.debug(1, "Parsing %s (full)", fn)
+
+        cfgData.setVar("__ONLYFINALISE", virtual or "default")
+        bb_data = cls.load_bbfile(fn, appends, cfgData)
+        return bb_data[virtual]
+
+    @classmethod
+    def parse(cls, filename, appends, configdata, caches_array):
+        """Parse the specified filename, returning the recipe information"""
+        infos = []
+        datastores = cls.load_bbfile(filename, appends, configdata)
+        depends = []
+        for variant, data in sorted(datastores.iteritems(),
+                                    key=lambda i: i[0],
+                                    reverse=True):
+            virtualfn = cls.realfn2virtual(filename, variant)
+            depends = depends + (data.getVar("__depends", False) or [])
+            if depends and not variant:
+                data.setVar("__depends", depends)
+
+            info_array = []
+            for cache_class in caches_array:
+                if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
+                    info = cache_class(filename, data)
+                    info_array.append(info)
+            infos.append((virtualfn, info_array))
+
+        return infos
+
+    def load(self, filename, appends, configdata):
+        """Obtain the recipe information for the specified filename,
+        using cached values if available, otherwise parsing.
+
+        Note that if it does parse to obtain the info, it will not
+        automatically add the information to the cache or to your
+        CacheData.  Use the add or add_info method to do so after
+        running this, or use loadData instead."""
+        cached = self.cacheValid(filename, appends)
+        if cached:
+            infos = []
+            # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
+            info_array = self.depends_cache[filename]
+            for variant in info_array[0].variants:
+                virtualfn = self.realfn2virtual(filename, variant)
+                infos.append((virtualfn, self.depends_cache[virtualfn]))
+        else:
+            logger.debug(1, "Parsing %s", filename)
+            infos = self.parse(filename, appends, configdata, self.caches_array)
+
+        return cached, infos
+
+    def loadData(self, fn, appends, cfgData, cacheData):
+        """Load the recipe info for the specified filename,
+        parsing and adding to the cache if necessary, and adding
+        the recipe information to the supplied CacheData instance."""
+        skipped, virtuals = 0, 0
+
+        cached, infos = self.load(fn, appends, cfgData)
+        for virtualfn, info_array in infos:
+            if info_array[0].skipped:
+                logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason)
+                skipped += 1
+            else:
+                self.add_info(virtualfn, info_array, cacheData, not cached)
+                virtuals += 1
+
+        return cached, skipped, virtuals
+
+    def cacheValid(self, fn, appends):
+        """
+        Is the cache valid for fn?
+        Fast version, no timestamps checked.
+        """
+        if fn not in self.checked:
+            self.cacheValidUpdate(fn, appends)
+
+        # Is cache enabled?
+        if not self.has_cache:
+            return False
+        if fn in self.clean:
+            return True
+        return False
+
+    def cacheValidUpdate(self, fn, appends):
+        """
+        Is the cache valid for fn?
+        Make thorough (slower) checks including timestamps.
+        """
+        # Is cache enabled?
+        if not self.has_cache:
+            return False
+
+        self.checked.add(fn)
+
+        # File isn't in depends_cache
+        if not fn in self.depends_cache:
+            logger.debug(2, "Cache: %s is not cached", fn)
+            return False
+
+        mtime = bb.parse.cached_mtime_noerror(fn)
+
+        # Check file still exists
+        if mtime == 0:
+            logger.debug(2, "Cache: %s no longer exists", fn)
+            self.remove(fn)
+            return False
+
+        info_array = self.depends_cache[fn]
+        # Check the file's timestamp
+        if mtime != info_array[0].timestamp:
+            logger.debug(2, "Cache: %s changed", fn)
+            self.remove(fn)
+            return False
+
+        # Check dependencies are still valid
+        depends = info_array[0].file_depends
+        if depends:
+            for f, old_mtime in depends:
+                fmtime = bb.parse.cached_mtime_noerror(f)
+                # Check if file still exists
+                if old_mtime != 0 and fmtime == 0:
+                    logger.debug(2, "Cache: %s's dependency %s was removed",
+                                    fn, f)
+                    self.remove(fn)
+                    return False
+
+                if (fmtime != old_mtime):
+                    logger.debug(2, "Cache: %s's dependency %s changed",
+                                    fn, f)
+                    self.remove(fn)
+                    return False
+
+        if hasattr(info_array[0], 'file_checksums'):
+            for _, fl in info_array[0].file_checksums.items():
+                fl = fl.strip()
+                while fl:
+                    # A .split() would be simpler but means spaces or colons in filenames would break
+                    a = fl.find(":True")
+                    b = fl.find(":False")
+                    if ((a < 0) and b) or ((b > 0) and (b < a)):
+                        f = fl[:b+6]
+                        fl = fl[b+7:]
+                    elif ((b < 0) and a) or ((a > 0) and (a < b)):
+                        f = fl[:a+5]
+                        fl = fl[a+6:]
+                    else:
+                        break
+                    fl = fl.strip()
+                    if "*" in f:
+                        continue
+                    f, exist = f.split(":")
+                    if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
+                        logger.debug(2, "Cache: %s's file checksum list file %s changed",
+                                        fn, f)
+                        self.remove(fn)
+                        return False
+
+        if appends != info_array[0].appends:
+            logger.debug(2, "Cache: appends for %s changed", fn)
+            logger.debug(2, "%s to %s" % (str(appends), str(info_array[0].appends)))
+            self.remove(fn)
+            return False
+
+        invalid = False
+        for cls in info_array[0].variants:
+            virtualfn = self.realfn2virtual(fn, cls)
+            self.clean.add(virtualfn)
+            if virtualfn not in self.depends_cache:
+                logger.debug(2, "Cache: %s is not cached", virtualfn)
+                invalid = True
+
+        # If any one of the variants is not present, mark as invalid for all
+        if invalid:
+            for cls in info_array[0].variants:
+                virtualfn = self.realfn2virtual(fn, cls)
+                if virtualfn in self.clean:
+                    logger.debug(2, "Cache: Removing %s from cache", virtualfn)
+                    self.clean.remove(virtualfn)
+            if fn in self.clean:
+                logger.debug(2, "Cache: Marking %s as not clean", fn)
+                self.clean.remove(fn)
+            return False
+
+        self.clean.add(fn)
+        return True
+
+    def remove(self, fn):
+        """
+        Remove a fn from the cache
+        Called from the parser in error cases
+        """
+        if fn in self.depends_cache:
+            logger.debug(1, "Removing %s from cache", fn)
+            del self.depends_cache[fn]
+        if fn in self.clean:
+            logger.debug(1, "Marking %s as unclean", fn)
+            self.clean.remove(fn)
+
+    def sync(self):
+        """
+        Save the cache
+        Called from the parser when complete (or exiting)
+        """
+
+        if not self.has_cache:
+            return
+
+        if self.cacheclean:
+            logger.debug(2, "Cache is clean, not saving.")
+            return
+
+        file_dict = {}
+        pickler_dict = {}
+        for cache_class in self.caches_array:
+            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
+                cache_class_name = cache_class.__name__
+                cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
+                file_dict[cache_class_name] = open(cachefile, "wb")
+                pickler_dict[cache_class_name] = pickle.Pickler(file_dict[cache_class_name], pickle.HIGHEST_PROTOCOL)
+
+        pickler_dict['CoreRecipeInfo'].dump(__cache_version__)
+        pickler_dict['CoreRecipeInfo'].dump(bb.__version__)
+
+        try:
+            for key, info_array in self.depends_cache.iteritems():
+                for info in info_array:
+                    if isinstance(info, RecipeInfoCommon):
+                        cache_class_name = info.__class__.__name__
+                        pickler_dict[cache_class_name].dump(key)
+                        pickler_dict[cache_class_name].dump(info)
+        finally:
+            for cache_class in self.caches_array:
+                if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
+                    cache_class_name = cache_class.__name__
+                    file_dict[cache_class_name].close()
+
+        del self.depends_cache
+
+    @staticmethod
+    def mtime(cachefile):
+        return bb.parse.cached_mtime_noerror(cachefile)
+
+    def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
+        if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
+            cacheData.add_from_recipeinfo(filename, info_array)
+
+            if watcher:
+                watcher(info_array[0].file_depends)
+
+        if not self.has_cache:
+            return
+
+        if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache:
+            if parsed:
+                self.cacheclean = False
+            self.depends_cache[filename] = info_array
+
+    def add(self, file_name, data, cacheData, parsed=None):
+        """
+        Save data we need into the cache
+        """
+
+        realfn = self.virtualfn2realfn(file_name)[0]
+
+        info_array = []
+        for cache_class in self.caches_array:
+            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
+                info_array.append(cache_class(realfn, data))
+        self.add_info(file_name, info_array, cacheData, parsed)
+
+    @staticmethod
+    def load_bbfile(bbfile, appends, config):
+        """
+        Load and parse one .bb build file
+        Return the parsed datastores as a dict keyed by class-extension variant ('' for the base recipe)
+        """
+        chdir_back = False
+
+        from bb import parse
+
+        # expand tmpdir to include this topdir
+        config.setVar('TMPDIR', config.getVar('TMPDIR', True) or "")
+        bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
+        oldpath = os.path.abspath(os.getcwd())
+        parse.cached_mtime_noerror(bbfile_loc)
+        bb_data = config.createCopy()
+        # The ConfHandler first looks if there is a TOPDIR and if not
+        # then it would call getcwd().
+        # Previously, we chdir()ed to bbfile_loc, called the handler
+        # and finally chdir()ed back, a couple of thousand times. We now
+        # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
+        if not bb_data.getVar('TOPDIR', False):
+            chdir_back = True
+            bb_data.setVar('TOPDIR', bbfile_loc)
+        try:
+            if appends:
+                bb_data.setVar('__BBAPPEND', " ".join(appends))
+            bb_data = parse.handle(bbfile, bb_data)
+            if chdir_back:
+                os.chdir(oldpath)
+            return bb_data
+        except:
+            if chdir_back:
+                os.chdir(oldpath)
+            raise
+
+
+def init(cooker):
+    """
+    The Objective: Cache the minimum amount of data possible yet get to the
+    stage of building packages (i.e. tryBuild) without reparsing any .bb files.
+
+    To do this, we intercept getVar calls and only cache the variables we see
+    being accessed. We rely on the cache getVar calls being made for all
+    variables bitbake might need to use to reach this stage. For each cached
+    file we need to track:
+
+    * Its mtime
+    * The mtimes of all its dependencies
+    * Whether it caused a parse.SkipRecipe exception
+
+    Files causing parsing errors are evicted from the cache.
+
+    """
+    return Cache(cooker.configuration.data, cooker.configuration.data_hash)
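
As a rough usage sketch (not part of this patch): a caller builds the cache from the cooker's configuration, asks it for recipe information (served from the on-disk cache when valid, parsed otherwise), and finally writes the cache back out. Here `cooker`, `fn`, `appends` and the datastore passed in are stand-ins for objects the real cooker provides:

    import bb.cache

    bb_cache = bb.cache.init(cooker)          # wraps Cache(cooker.configuration.data, ...)
    cachedata = bb.cache.CacheData(bb_cache.caches_array)

    # Served from the loaded cache when cacheValid() agrees, parsed otherwise;
    # loadData() also records the result in cachedata via add_info().
    cached, skipped, virtuals = bb_cache.loadData(fn, appends,
                                                  cooker.configuration.data, cachedata)

    bb_cache.sync()                           # persist anything newly parsed
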
+
+
+class CacheData(object):
+    """
+    The data structures we compile from the cached data
+    """
+
+    def __init__(self, caches_array):
+        self.caches_array = caches_array
+        for cache_class in self.caches_array:
+            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
+                cache_class.init_cacheData(self)        
+
+        # Direct cache variables
+        self.task_queues = {}
+        self.preferred = {}
+        self.tasks = {}
+        # Indirect Cache variables (set elsewhere)
+        self.ignored_dependencies = []
+        self.world_target = set()
+        self.bbfile_priority = {}
+
+    def add_from_recipeinfo(self, fn, info_array):
+        for info in info_array:
+            info.add_cacheData(self, fn)
+
+class MultiProcessCache(object):
+    """
+    BitBake multi-process cache implementation
+
+    Used by the codeparser & file checksum caches
+    """
+
+    def __init__(self):
+        self.cachefile = None
+        self.cachedata = self.create_cachedata()
+        self.cachedata_extras = self.create_cachedata()
+
+    def init_cache(self, d, cache_file_name=None):
+        cachedir = (d.getVar("PERSISTENT_DIR", True) or
+                    d.getVar("CACHE", True))
+        if cachedir in [None, '']:
+            return
+        bb.utils.mkdirhier(cachedir)
+        self.cachefile = os.path.join(cachedir,
+                                      cache_file_name or self.__class__.cache_file_name)
+        logger.debug(1, "Using cache in '%s'", self.cachefile)
+
+        glf = bb.utils.lockfile(self.cachefile + ".lock")
+
+        try:
+            with open(self.cachefile, "rb") as f:
+                p = pickle.Unpickler(f)
+                data, version = p.load()
+        except:
+            bb.utils.unlockfile(glf)
+            return
+
+        bb.utils.unlockfile(glf)
+
+        if version != self.__class__.CACHE_VERSION:
+            return
+
+        self.cachedata = data
+
+    def create_cachedata(self):
+        data = [{}]
+        return data
+
+    def save_extras(self):
+        if not self.cachefile:
+            return
+
+        glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)
+
+        i = os.getpid()
+        lf = None
+        while not lf:
+            lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
+            if not lf or os.path.exists(self.cachefile + "-" + str(i)):
+                if lf:
+                    bb.utils.unlockfile(lf)
+                    lf = None
+                i = i + 1
+                continue
+
+            with open(self.cachefile + "-" + str(i), "wb") as f:
+                p = pickle.Pickler(f, -1)
+                p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])
+
+        bb.utils.unlockfile(lf)
+        bb.utils.unlockfile(glf)
+
+    def merge_data(self, source, dest):
+        for j in range(0,len(dest)):
+            for h in source[j]:
+                if h not in dest[j]:
+                    dest[j][h] = source[j][h]
+
+    def save_merge(self):
+        if not self.cachefile:
+            return
+
+        glf = bb.utils.lockfile(self.cachefile + ".lock")
+
+        data = self.cachedata
+
+        for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
+            f = os.path.join(os.path.dirname(self.cachefile), f)
+            try:
+                with open(f, "rb") as fd:
+                    p = pickle.Unpickler(fd)
+                    extradata, version = p.load()
+            except (IOError, EOFError):
+                os.unlink(f)
+                continue
+
+            if version != self.__class__.CACHE_VERSION:
+                os.unlink(f)
+                continue
+
+            self.merge_data(extradata, data)
+            os.unlink(f)
+
+        with open(self.cachefile, "wb") as f:
+            p = pickle.Pickler(f, -1)
+            p.dump([data, self.__class__.CACHE_VERSION])
+
+        bb.utils.unlockfile(glf)
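
The split between cachedata and cachedata_extras above is what makes this safe to use from several processes: each worker writes only its extras to a per-process side file, and one merge pass later folds those side files into the main cache under the lock. A hedged sketch of that lifecycle (not part of this patch), using the CodeParserCache subclass added elsewhere in this commit; `d` stands in for a configuration datastore:

    from bb.codeparser import codeparsercache

    # Main process, once the configuration datastore is available:
    codeparsercache.init_cache(d)      # loads bb_codeparser.dat if it exists

    # Each parser worker, after filling cachedata_extras:
    codeparsercache.save_extras()      # writes a per-process bb_codeparser.dat-<pid> side file

    # Main process again, after the workers have finished:
    codeparsercache.save_merge()       # merges the side files and rewrites bb_codeparser.dat
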
+

+ 75 - 0
bitbake/lib/bb/cache_extra.py

@@ -0,0 +1,75 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# All extra RecipeInfo classes are defined in this file. Currently
+# only Hob (the image creator) requests extra fields, so only
+# HobRecipeInfo is defined; it is named after 'hob' because hob
+# introduced it. Users can define further RecipeInfo classes here
+# or simply reuse the ones already defined. A follow-up patch loads
+# these extra RecipeInfo classes dynamically and uses them for
+# loading/saving the extra cache fields.
+
+# Copyright (C) 2011, Intel Corporation. All rights reserved.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+from bb.cache import RecipeInfoCommon
+
+class HobRecipeInfo(RecipeInfoCommon):
+    __slots__ = ()
+
+    classname = "HobRecipeInfo"
+    # override this member with the name of the extra cache file to use,
+    # e.g. bb_extracache_hob.dat (the core cache itself is bb_cache.dat)
+    cachefile = "bb_extracache_" + classname + ".dat"
+
+    # override this member with the list of extra cache fields
+    # that this class will provide
+    cachefields = ['summary', 'license', 'section',
+            'description', 'homepage', 'bugtracker',
+            'prevision', 'files_info']
+
+    def __init__(self, filename, metadata):
+
+        self.summary = self.getvar('SUMMARY', metadata)
+        self.license = self.getvar('LICENSE', metadata)
+        self.section = self.getvar('SECTION', metadata)
+        self.description = self.getvar('DESCRIPTION', metadata)
+        self.homepage = self.getvar('HOMEPAGE', metadata)
+        self.bugtracker = self.getvar('BUGTRACKER', metadata)
+        self.prevision = self.getvar('PR', metadata)
+        self.files_info = self.getvar('FILES_INFO', metadata)
+
+    @classmethod
+    def init_cacheData(cls, cachedata):
+        # CacheData in Hob RecipeInfo Class
+        cachedata.summary = {}
+        cachedata.license = {}
+        cachedata.section = {}
+        cachedata.description = {}
+        cachedata.homepage = {}
+        cachedata.bugtracker = {}
+        cachedata.prevision = {}
+        cachedata.files_info = {}
+
+    def add_cacheData(self, cachedata, fn):
+        cachedata.summary[fn] = self.summary
+        cachedata.license[fn] = self.license
+        cachedata.section[fn] = self.section
+        cachedata.description[fn] = self.description
+        cachedata.homepage[fn] = self.homepage
+        cachedata.bugtracker[fn] = self.bugtracker
+        cachedata.prevision[fn] = self.prevision
+        cachedata.files_info[fn] = self.files_info
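
As the header comment says, further RecipeInfo classes can sit alongside HobRecipeInfo by following the same pattern. A minimal hypothetical sketch (not part of this patch); the class name, its cache file and the MAINTAINER field are invented for illustration:

    from bb.cache import RecipeInfoCommon

    class MaintainerRecipeInfo(RecipeInfoCommon):
        __slots__ = ()

        classname = "MaintainerRecipeInfo"
        cachefile = "bb_extracache_" + classname + ".dat"
        cachefields = ['maintainer']

        def __init__(self, filename, metadata):
            self.maintainer = self.getvar('MAINTAINER', metadata)

        @classmethod
        def init_cacheData(cls, cachedata):
            cachedata.maintainer = {}

        def add_cacheData(self, cachedata, fn):
            cachedata.maintainer[fn] = self.maintainer
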

+ 137 - 0
bitbake/lib/bb/checksum.py

@@ -0,0 +1,137 @@
+# Local file checksum cache implementation
+#
+# Copyright (C) 2012 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import glob
+import operator
+import os
+import stat
+import bb.utils
+import logging
+from bb.cache import MultiProcessCache
+
+logger = logging.getLogger("BitBake.Cache")
+
+try:
+    import cPickle as pickle
+except ImportError:
+    import pickle
+    logger.info("Importing cPickle failed. "
+                "Falling back to a very slow implementation.")
+
+
+# mtime cache (non-persistent)
+# based upon the assumption that files do not change during bitbake run
+class FileMtimeCache(object):
+    cache = {}
+
+    def cached_mtime(self, f):
+        if f not in self.cache:
+            self.cache[f] = os.stat(f)[stat.ST_MTIME]
+        return self.cache[f]
+
+    def cached_mtime_noerror(self, f):
+        if f not in self.cache:
+            try:
+                self.cache[f] = os.stat(f)[stat.ST_MTIME]
+            except OSError:
+                return 0
+        return self.cache[f]
+
+    def update_mtime(self, f):
+        self.cache[f] = os.stat(f)[stat.ST_MTIME]
+        return self.cache[f]
+
+    def clear(self):
+        self.cache.clear()
+
+# Checksum + mtime cache (persistent)
+class FileChecksumCache(MultiProcessCache):
+    cache_file_name = "local_file_checksum_cache.dat"
+    CACHE_VERSION = 1
+
+    def __init__(self):
+        self.mtime_cache = FileMtimeCache()
+        MultiProcessCache.__init__(self)
+
+    def get_checksum(self, f):
+        entry = self.cachedata[0].get(f)
+        cmtime = self.mtime_cache.cached_mtime(f)
+        if entry:
+            (mtime, hashval) = entry
+            if cmtime == mtime:
+                return hashval
+            else:
+                bb.debug(2, "file %s changed mtime, recompute checksum" % f)
+
+        hashval = bb.utils.md5_file(f)
+        self.cachedata_extras[0][f] = (cmtime, hashval)
+        return hashval
+
+    def merge_data(self, source, dest):
+        for h in source[0]:
+            if h in dest[0]:
+                (smtime, _) = source[0][h]
+                (dmtime, _) = dest[0][h]
+                if smtime > dmtime:
+                    dest[0][h] = source[0][h]
+            else:
+                dest[0][h] = source[0][h]
+
+    def get_checksums(self, filelist, pn):
+        """Get checksums for a list of files"""
+
+        def checksum_file(f):
+            try:
+                checksum = self.get_checksum(f)
+            except OSError as e:
+                bb.warn("Unable to get checksum for %s SRC_URI entry %s: %s" % (pn, os.path.basename(f), e))
+                return None
+            return checksum
+
+        def checksum_dir(pth):
+            # Handle directories recursively
+            dirchecksums = []
+            for root, dirs, files in os.walk(pth):
+                for name in files:
+                    fullpth = os.path.join(root, name)
+                    checksum = checksum_file(fullpth)
+                    if checksum:
+                        dirchecksums.append((fullpth, checksum))
+            return dirchecksums
+
+        checksums = []
+        for pth in filelist.split():
+            exist = pth.split(":")[1]
+            if exist == "False":
+                continue
+            pth = pth.split(":")[0]
+            if '*' in pth:
+                # Handle globs
+                for f in glob.glob(pth):
+                    if os.path.isdir(f):
+                        checksums.extend(checksum_dir(f))
+                    else:
+                        checksum = checksum_file(f)
+                        checksums.append((f, checksum))
+            elif os.path.isdir(pth):
+                checksums.extend(checksum_dir(pth))
+            else:
+                checksum = checksum_file(pth)
+                checksums.append((pth, checksum))
+
+        checksums.sort(key=operator.itemgetter(1))
+        return checksums
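
The filelist argument is expected in the same space-separated "path:exists" form that Cache.cacheValidUpdate() picks apart in cache.py above: entries flagged ":False" are skipped, globs are expanded and directories are walked. A small illustrative sketch (not part of this patch); the paths and the datastore `d` are made up:

    cache = FileChecksumCache()
    cache.init_cache(d)    # `d` must provide PERSISTENT_DIR or CACHE

    filelist = "/srv/work/files/init.sh:True /srv/work/files/*.cfg:True /srv/work/files/gone.patch:False"
    checksums = cache.get_checksums(filelist, "example-recipe")
    # -> a list of (path, md5) tuples, sorted by checksum value
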

+ 436 - 0
bitbake/lib/bb/codeparser.py

@@ -0,0 +1,436 @@
+import ast
+import codegen
+import logging
+import os.path
+import bb.utils, bb.data
+from itertools import chain
+from pysh import pyshyacc, pyshlex, sherrors
+from bb.cache import MultiProcessCache
+
+
+logger = logging.getLogger('BitBake.CodeParser')
+
+try:
+    import cPickle as pickle
+except ImportError:
+    import pickle
+    logger.info('Importing cPickle failed.  Falling back to a very slow implementation.')
+
+
+def check_indent(codestr):
+    """If the code is indented, add a top level piece of code to 'remove' the indentation"""
+
+    i = 0
+    while codestr[i] in ["\n", "\t", " "]:
+        i = i + 1
+
+    if i == 0:
+        return codestr
+
+    if codestr[i-1] == "\t" or codestr[i-1] == " ":
+        if codestr[0] == "\n":
+            # Since we're adding a line, we need to remove one line of any empty padding
+            # to ensure line numbers are correct
+            codestr = codestr[1:]
+        return "if 1:\n" + codestr
+
+    return codestr
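
For example (not part of this patch), an indented fragment such as the body of an event handler gets wrapped so that compile() will accept it outside its original context:

    snippet = "\n    d.setVar('FOO', 'bar')\n"
    print(check_indent(snippet))
    # if 1:
    #     d.setVar('FOO', 'bar')
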
+
+
+# pickle, at least in python 2.7.3, copes badly with duplicated data: equal
+# objects are pickled and unpickled as separate copies rather than shared,
+# so files containing many duplicate objects quickly become a mess.
+#
+# When the sets are originally created, python calls intern() on the set keys
+# which significantly improves memory usage. Sadly the pickle/unpickle process
+# doesn't call intern() on the keys and results in the same strings being duplicated
+# in memory. This also means pickle will save the same string multiple times in
+# the cache file.
+#
+# By having shell and python cacheline objects with setstate/getstate, we force
+# the object creation through our own routine where we can call intern (via internSet).
+#
+# We also use hashable frozensets and ensure we use references to these so that
+# duplicates can be removed, both in memory and in the resulting pickled data.
+#
+# By playing these games, the size of the cache file shrinks dramatically
+# meaning faster load times and the reloaded cache files also consume much less
+# memory. Smaller cache files, faster load times and lower memory usage is good.
+#
+# A custom getstate/setstate using tuples is actually worth 15% cachesize by
+# avoiding duplication of the attribute names!
+
+class SetCache(object):
+    def __init__(self):
+        self.setcache = {}
+
+    def internSet(self, items):
+        
+        new = []
+        for i in items:
+            new.append(intern(i))
+        s = frozenset(new)
+        if hash(s) in self.setcache:
+            return self.setcache[hash(s)]
+        self.setcache[hash(s)] = s
+        return s
+
+codecache = SetCache()
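
The effect of the interning described above, sketched briefly (not part of this patch): equal sets passed to internSet() come back as the very same frozenset object, so both the in-memory data and the pickled cache share one copy:

    a = codecache.internSet(["bb.build.exec_func", "oe_runmake"])
    b = codecache.internSet(["oe_runmake", "bb.build.exec_func"])
    assert a == b
    assert a is b    # one shared frozenset, not two equal copies
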
+
+class pythonCacheLine(object):
+    def __init__(self, refs, execs, contains):
+        self.refs = codecache.internSet(refs)
+        self.execs = codecache.internSet(execs)
+        self.contains = {}
+        for c in contains:
+            self.contains[c] = codecache.internSet(contains[c])
+
+    def __getstate__(self):
+        return (self.refs, self.execs, self.contains)
+
+    def __setstate__(self, state):
+        (refs, execs, contains) = state
+        self.__init__(refs, execs, contains)
+    def __hash__(self):
+        l = (hash(self.refs), hash(self.execs))
+        for c in sorted(self.contains.keys()):
+            l = l + (c, hash(self.contains[c]))
+        return hash(l)
+    def __repr__(self):
+        return " ".join([str(self.refs), str(self.execs), str(self.contains)]) 
+
+
+class shellCacheLine(object):
+    def __init__(self, execs):
+        self.execs = codecache.internSet(execs)
+
+    def __getstate__(self):
+        return (self.execs)
+
+    def __setstate__(self, state):
+        (execs) = state
+        self.__init__(execs)
+    def __hash__(self):
+        return hash(self.execs)
+    def __repr__(self):
+        return str(self.execs)
+
+class CodeParserCache(MultiProcessCache):
+    cache_file_name = "bb_codeparser.dat"
+    CACHE_VERSION = 7
+
+    def __init__(self):
+        MultiProcessCache.__init__(self)
+        self.pythoncache = self.cachedata[0]
+        self.shellcache = self.cachedata[1]
+        self.pythoncacheextras = self.cachedata_extras[0]
+        self.shellcacheextras = self.cachedata_extras[1]
+
+        # To avoid duplication in the codeparser cache, keep
+        # a lookup of hashes of objects we already have
+        self.pythoncachelines = {}
+        self.shellcachelines = {}
+
+    def newPythonCacheLine(self, refs, execs, contains):
+        cacheline = pythonCacheLine(refs, execs, contains)
+        h = hash(cacheline)
+        if h in self.pythoncachelines:
+            return self.pythoncachelines[h]
+        self.pythoncachelines[h] = cacheline
+        return cacheline
+
+    def newShellCacheLine(self, execs):
+        cacheline = shellCacheLine(execs)
+        h = hash(cacheline)
+        if h in self.shellcachelines:
+            return self.shellcachelines[h]
+        self.shellcachelines[h] = cacheline
+        return cacheline
+
+    def init_cache(self, d):
+        # Check if we already have the caches
+        if self.pythoncache:
+            return
+
+        MultiProcessCache.init_cache(self, d)
+
+        # cachedata gets re-assigned in the parent
+        self.pythoncache = self.cachedata[0]
+        self.shellcache = self.cachedata[1]
+
+    def create_cachedata(self):
+        data = [{}, {}]
+        return data
+
+codeparsercache = CodeParserCache()
+
+def parser_cache_init(d):
+    codeparsercache.init_cache(d)
+
+def parser_cache_save():
+    codeparsercache.save_extras()
+
+def parser_cache_savemerge():
+    codeparsercache.save_merge()
+
+Logger = logging.getLoggerClass()
+class BufferedLogger(Logger):
+    def __init__(self, name, level=0, target=None):
+        Logger.__init__(self, name)
+        self.setLevel(level)
+        self.buffer = []
+        self.target = target
+
+    def handle(self, record):
+        self.buffer.append(record)
+
+    def flush(self):
+        for record in self.buffer:
+            self.target.handle(record)
+        self.buffer = []
+
+class PythonParser():
+    getvars = (".getVar", ".appendVar", ".prependVar")
+    containsfuncs = ("bb.utils.contains", "base_contains", "bb.utils.contains_any")
+    execfuncs = ("bb.build.exec_func", "bb.build.exec_task")
+
+    def warn(self, func, arg):
+        """Warn about calls of bitbake APIs which pass a non-literal
+        argument for the variable name, as we're not able to track such
+        a reference.
+        """
+
+        try:
+            funcstr = codegen.to_source(func)
+            argstr = codegen.to_source(arg)
+        except TypeError:
+            self.log.debug(2, 'Failed to convert function and argument to source form')
+        else:
+            self.log.debug(1, self.unhandled_message % (funcstr, argstr))
+
+    def visit_Call(self, node):
+        name = self.called_node_name(node.func)
+        if name and name.endswith(self.getvars) or name in self.containsfuncs:
+            if isinstance(node.args[0], ast.Str):
+                varname = node.args[0].s
+                if name in self.containsfuncs and isinstance(node.args[1], ast.Str):
+                    if varname not in self.contains:
+                        self.contains[varname] = set()
+                    self.contains[varname].add(node.args[1].s)
+                else:                      
+                    self.references.add(node.args[0].s)
+            else:
+                self.warn(node.func, node.args[0])
+        elif name and name.endswith(".expand"):
+            if isinstance(node.args[0], ast.Str):
+                value = node.args[0].s
+                d = bb.data.init()
+                parser = d.expandWithRefs(value, self.name)
+                self.references |= parser.references
+                self.execs |= parser.execs
+                for varname in parser.contains:
+                    if varname not in self.contains:
+                        self.contains[varname] = set()
+                    self.contains[varname] |= parser.contains[varname]
+        elif name in self.execfuncs:
+            if isinstance(node.args[0], ast.Str):
+                self.var_execs.add(node.args[0].s)
+            else:
+                self.warn(node.func, node.args[0])
+        elif name and isinstance(node.func, (ast.Name, ast.Attribute)):
+            self.execs.add(name)
+
+    def called_node_name(self, node):
+        """Given a called node, return its original string form"""
+        components = []
+        while node:
+            if isinstance(node, ast.Attribute):
+                components.append(node.attr)
+                node = node.value
+            elif isinstance(node, ast.Name):
+                components.append(node.id)
+                return '.'.join(reversed(components))
+            else:
+                break
+
+    def __init__(self, name, log):
+        self.name = name
+        self.var_execs = set()
+        self.contains = {}
+        self.execs = set()
+        self.references = set()
+        self.log = BufferedLogger('BitBake.Data.PythonParser', logging.DEBUG, log)
+
+        self.unhandled_message = "in call of %s, argument '%s' is not a string literal"
+        self.unhandled_message = "while parsing %s, %s" % (name, self.unhandled_message)
+
+    def parse_python(self, node, lineno=0, filename="<string>"):
+        if not node or not node.strip():
+            return
+
+        h = hash(str(node))
+
+        if h in codeparsercache.pythoncache:
+            self.references = set(codeparsercache.pythoncache[h].refs)
+            self.execs = set(codeparsercache.pythoncache[h].execs)
+            self.contains = {}
+            for i in codeparsercache.pythoncache[h].contains:
+                self.contains[i] = set(codeparsercache.pythoncache[h].contains[i])
+            return
+
+        if h in codeparsercache.pythoncacheextras:
+            self.references = set(codeparsercache.pythoncacheextras[h].refs)
+            self.execs = set(codeparsercache.pythoncacheextras[h].execs)
+            self.contains = {}
+            for i in codeparsercache.pythoncacheextras[h].contains:
+                self.contains[i] = set(codeparsercache.pythoncacheextras[h].contains[i])
+            return
+
+        # We can't pass a starting line number to compile(), but we can pad with blank lines so reported line numbers match the original file
+        node = "\n" * int(lineno) + node
+        code = compile(check_indent(str(node)), filename, "exec",
+                       ast.PyCF_ONLY_AST)
+
+        for n in ast.walk(code):
+            if n.__class__.__name__ == "Call":
+                self.visit_Call(n)
+
+        self.execs.update(self.var_execs)
+
+        codeparsercache.pythoncacheextras[h] = codeparsercache.newPythonCacheLine(self.references, self.execs, self.contains)
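
A brief usage sketch (not part of this patch), assuming BitBake's lib/ directory (which bundles codegen and pysh) is importable; the fragment and variable names are invented:

    import logging

    parser = PythonParser("do_example", logging.getLogger("BitBake.Data"))
    parser.parse_python("d.getVar('FOO', True)\nbb.build.exec_func('do_helper', d)\n")

    # 'FOO' was read through getVar, so it is recorded as a reference;
    # the exec_func target reaches execs via var_execs.
    assert "FOO" in parser.references
    assert "do_helper" in parser.execs
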
+
+class ShellParser():
+    def __init__(self, name, log):
+        self.funcdefs = set()
+        self.allexecs = set()
+        self.execs = set()
+        self.log = BufferedLogger('BitBake.Data.%s' % name, logging.DEBUG, log)
+        self.unhandled_template = "unable to handle non-literal command '%s'"
+        self.unhandled_template = "while parsing %s, %s" % (name, self.unhandled_template)
+
+    def parse_shell(self, value):
+        """Parse the supplied shell code in a string, returning the external
+        commands it executes.
+        """
+
+        h = hash(str(value))
+
+        if h in codeparsercache.shellcache:
+            self.execs = set(codeparsercache.shellcache[h].execs)
+            return self.execs
+
+        if h in codeparsercache.shellcacheextras:
+            self.execs = set(codeparsercache.shellcacheextras[h].execs)
+            return self.execs
+
+        self._parse_shell(value)
+        self.execs = set(cmd for cmd in self.allexecs if cmd not in self.funcdefs)
+
+        codeparsercache.shellcacheextras[h] = codeparsercache.newShellCacheLine(self.execs)
+
+        return self.execs
+
+    def _parse_shell(self, value):
+        try:
+            tokens, _ = pyshyacc.parse(value, eof=True, debug=False)
+        except pyshlex.NeedMore:
+            raise sherrors.ShellSyntaxError("Unexpected EOF")
+
+        for token in tokens:
+            self.process_tokens(token)
+
+    def process_tokens(self, tokens):
+        """Process a supplied portion of the syntax tree as returned by
+        pyshyacc.parse.
+        """
+
+        def function_definition(value):
+            self.funcdefs.add(value.name)
+            return [value.body], None
+
+        def case_clause(value):
+            # Element 0 of each item in the case is the list of patterns, and
+            # Element 1 of each item in the case is the list of commands to be
+            # executed when that pattern matches.
+            words = chain(*[item[0] for item in value.items])
+            cmds  = chain(*[item[1] for item in value.items])
+            return cmds, words
+
+        def if_clause(value):
+            main = chain(value.cond, value.if_cmds)
+            rest = value.else_cmds
+            if isinstance(rest, tuple) and rest[0] == "elif":
+                return chain(main, if_clause(rest[1]))
+            else:
+                return chain(main, rest)
+
+        def simple_command(value):
+            return None, chain(value.words, (assign[1] for assign in value.assigns))
+
+        token_handlers = {
+            "and_or": lambda x: ((x.left, x.right), None),
+            "async": lambda x: ([x], None),
+            "brace_group": lambda x: (x.cmds, None),
+            "for_clause": lambda x: (x.cmds, x.items),
+            "function_definition": function_definition,
+            "if_clause": lambda x: (if_clause(x), None),
+            "pipeline": lambda x: (x.commands, None),
+            "redirect_list": lambda x: ([x.cmd], None),
+            "subshell": lambda x: (x.cmds, None),
+            "while_clause": lambda x: (chain(x.condition, x.cmds), None),
+            "until_clause": lambda x: (chain(x.condition, x.cmds), None),
+            "simple_command": simple_command,
+            "case_clause": case_clause,
+        }
+
+        for token in tokens:
+            name, value = token
+            try:
+                more_tokens, words = token_handlers[name](value)
+            except KeyError:
+                raise NotImplementedError("Unsupported token type " + name)
+
+            if more_tokens:
+                self.process_tokens(more_tokens)
+
+            if words:
+                self.process_words(words)
+
+    def process_words(self, words):
+        """Process a set of 'words' in pyshyacc parlance, which includes
+        extraction of executed commands from $() blocks, as well as grabbing
+        the command name argument.
+        """
+
+        words = list(words)
+        for word in list(words):
+            wtree = pyshlex.make_wordtree(word[1])
+            for part in wtree:
+                if not isinstance(part, list):
+                    continue
+
+                if part[0] in ('`', '$('):
+                    command = pyshlex.wordtree_as_string(part[1:-1])
+                    self._parse_shell(command)
+
+                    if word[0] in ("cmd_name", "cmd_word"):
+                        if word in words:
+                            words.remove(word)
+
+        usetoken = False
+        for word in words:
+            if word[0] in ("cmd_name", "cmd_word") or \
+               (usetoken and word[0] == "TOKEN"):
+                if "=" in word[1]:
+                    usetoken = True
+                    continue
+
+                cmd = word[1]
+                if cmd.startswith("$"):
+                    self.log.debug(1, self.unhandled_template % cmd)
+                elif cmd == "eval":
+                    command = " ".join(word for _, word in words[1:])
+                    self._parse_shell(command)
+                else:
+                    self.allexecs.add(cmd)
+                break
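
A matching sketch for the shell side (again not part of this patch, and again assuming the bundled pysh is importable); the shell fragment is invented:

    import logging

    parser = ShellParser("do_install", logging.getLogger("BitBake.Data"))
    execs = parser.parse_shell("install -d ${D}${bindir}\noe_runmake DESTDIR=${D} install\n")

    # Only the command names are collected, not their arguments or
    # leading variable assignments.
    assert "install" in execs
    assert "oe_runmake" in execs
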

+ 474 - 0
bitbake/lib/bb/command.py

@@ -0,0 +1,474 @@
+"""
+BitBake 'Command' module
+
+Provide an interface to interact with the bitbake server through 'commands'
+"""
+
+# Copyright (C) 2006-2007  Richard Purdie
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+"""
+The bitbake server takes 'commands' from its UI/commandline.
+Commands are either synchronous or asynchronous.
+Async commands return data to the client in the form of events.
+Sync commands must only return data through the function return value
+and must not trigger events, directly or indirectly.
+Commands are queued in a CommandQueue
+"""
+
+import bb.event
+import bb.cooker
+
+class CommandCompleted(bb.event.Event):
+    pass
+
+class CommandExit(bb.event.Event):
+    def __init__(self, exitcode):
+        bb.event.Event.__init__(self)
+        self.exitcode = int(exitcode)
+
+class CommandFailed(CommandExit):
+    def __init__(self, message):
+        self.error = message
+        CommandExit.__init__(self, 1)
+
+class CommandError(Exception):
+    pass
+
+class Command:
+    """
+    A queue of asynchronous commands for bitbake
+    """
+    def __init__(self, cooker):
+        self.cooker = cooker
+        self.cmds_sync = CommandsSync()
+        self.cmds_async = CommandsAsync()
+
+        # FIXME Add lock for this
+        self.currentAsyncCommand = None
+
+    def runCommand(self, commandline, ro_only = False):
+        command = commandline.pop(0)
+        if hasattr(CommandsSync, command):
+            # Can run synchronous commands straight away
+            command_method = getattr(self.cmds_sync, command)
+            if ro_only:
+                if not hasattr(command_method, 'readonly') or False == getattr(command_method, 'readonly'):
+                    return None, "Not able to execute not readonly commands in readonly mode"
+            try:
+                if getattr(command_method, 'needconfig', True):
+                    self.cooker.updateCacheSync()
+                result = command_method(self, commandline)
+            except CommandError as exc:
+                return None, exc.args[0]
+            except (Exception, SystemExit):
+                import traceback
+                return None, traceback.format_exc()
+            else:
+                return result, None
+        if self.currentAsyncCommand is not None:
+            return None, "Busy (%s in progress)" % self.currentAsyncCommand[0]
+        if command not in CommandsAsync.__dict__:
+            return None, "No such command"
+        self.currentAsyncCommand = (command, commandline)
+        self.cooker.configuration.server_register_idlecallback(self.cooker.runCommands, self.cooker)
+        return True, None
+
+    def runAsyncCommand(self):
+        try:
+            if self.cooker.state in (bb.cooker.state.error, bb.cooker.state.shutdown, bb.cooker.state.forceshutdown):
+                # updateCache will trigger a shutdown of the parser
+                # and then raise BBHandledException triggering an exit
+                self.cooker.updateCache()
+                return False
+            if self.currentAsyncCommand is not None:
+                (command, options) = self.currentAsyncCommand
+                commandmethod = getattr(CommandsAsync, command)
+                needcache = getattr( commandmethod, "needcache" )
+                if needcache and self.cooker.state != bb.cooker.state.running:
+                    self.cooker.updateCache()
+                    return True
+                else:
+                    commandmethod(self.cmds_async, self, options)
+                    return False
+            else:
+                return False
+        except KeyboardInterrupt as exc:
+            self.finishAsyncCommand("Interrupted")
+            return False
+        except SystemExit as exc:
+            arg = exc.args[0]
+            if isinstance(arg, basestring):
+                self.finishAsyncCommand(arg)
+            else:
+                self.finishAsyncCommand("Exited with %s" % arg)
+            return False
+        except Exception as exc:
+            import traceback
+            if isinstance(exc, bb.BBHandledException):
+                self.finishAsyncCommand("")
+            else:
+                self.finishAsyncCommand(traceback.format_exc())
+            return False
+
+    def finishAsyncCommand(self, msg=None, code=None):
+        if msg or msg == "":
+            bb.event.fire(CommandFailed(msg), self.cooker.expanded_data)
+        elif code:
+            bb.event.fire(CommandExit(code), self.cooker.expanded_data)
+        else:
+            bb.event.fire(CommandCompleted(), self.cooker.expanded_data)
+        self.currentAsyncCommand = None
+        self.cooker.finishcommand()
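
A rough sketch (not part of this patch) of how a UI front end drives this queue; `cooker` stands in for a configured BBCooker, the command names come from CommandsSync/CommandsAsync below, and the build target is only an example:

    cmd = Command(cooker)

    # Synchronous command: runs immediately, result comes back directly.
    value, error = cmd.runCommand(["getVariable", "MACHINE"])

    # Asynchronous command: only queued here; the server's idle callback
    # runs it via runAsyncCommand() and the outcome arrives as events
    # (CommandCompleted / CommandFailed / CommandExit).
    queued, error = cmd.runCommand(["buildTargets", ["core-image-minimal"], "build"])
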
+
+class CommandsSync:
+    """
+    A class of synchronous commands
+    These should run quickly so as not to hurt interactive performance.
+    These must not influence any running asynchronous command.
+    """
+
+    def stateShutdown(self, command, params):
+        """
+        Trigger cooker 'shutdown' mode
+        """
+        command.cooker.shutdown(False)
+
+    def stateForceShutdown(self, command, params):
+        """
+        Stop the cooker
+        """
+        command.cooker.shutdown(True)
+
+    def getAllKeysWithFlags(self, command, params):
+        """
+        Returns a dump of the global state. Call with
+        variable flags to be retrieved as params.
+        """
+        flaglist = params[0]
+        return command.cooker.getAllKeysWithFlags(flaglist)
+    getAllKeysWithFlags.readonly = True
+
+    def getVariable(self, command, params):
+        """
+        Read the value of a variable from data
+        """
+        varname = params[0]
+        expand = True
+        if len(params) > 1:
+            expand = (params[1] == "True")
+
+        return command.cooker.data.getVar(varname, expand)
+    getVariable.readonly = True
+
+    def setVariable(self, command, params):
+        """
+        Set the value of variable in data
+        """
+        varname = params[0]
+        value = str(params[1])
+        command.cooker.data.setVar(varname, value)
+
+    def getSetVariable(self, command, params):
+        """
+        Read the value of a variable from data and set it into the datastore
+        which effectively expands and locks the value.
+        """
+        varname = params[0]
+        result = self.getVariable(command, params)
+        command.cooker.data.setVar(varname, result)
+        return result
+
+    def setConfig(self, command, params):
+        """
+        Set the value of variable in configuration
+        """
+        varname = params[0]
+        value = str(params[1])
+        setattr(command.cooker.configuration, varname, value)
+
+    def enableDataTracking(self, command, params):
+        """
+        Enable history tracking for variables
+        """
+        command.cooker.enableDataTracking()
+
+    def disableDataTracking(self, command, params):
+        """
+        Disable history tracking for variables
+        """
+        command.cooker.disableDataTracking()
+
+    def setPrePostConfFiles(self, command, params):
+        prefiles = params[0].split()
+        postfiles = params[1].split()
+        command.cooker.configuration.prefile = prefiles
+        command.cooker.configuration.postfile = postfiles
+    setPrePostConfFiles.needconfig = False
+
+    def getCpuCount(self, command, params):
+        """
+        Get the CPU count on the bitbake server
+        """
+        return bb.utils.cpu_count()
+    getCpuCount.readonly = True
+    getCpuCount.needconfig = False
+
+    def matchFile(self, command, params):
+        fMatch = params[0]
+        return command.cooker.matchFile(fMatch)
+    matchFile.needconfig = False
+
+    def generateNewImage(self, command, params):
+        image = params[0]
+        base_image = params[1]
+        package_queue = params[2]
+        timestamp = params[3]
+        description = params[4]
+        return command.cooker.generateNewImage(image, base_image,
+                        package_queue, timestamp, description)
+
+    def ensureDir(self, command, params):
+        directory = params[0]
+        bb.utils.mkdirhier(directory)
+    ensureDir.needconfig = False
+
+    def setVarFile(self, command, params):
+        """
+        Save a variable in a file; used for saving in a configuration file
+        """
+        var = params[0]
+        val = params[1]
+        default_file = params[2]
+        op = params[3]
+        command.cooker.modifyConfigurationVar(var, val, default_file, op)
+    setVarFile.needconfig = False
+
+    def removeVarFile(self, command, params):
+        """
+        Remove a variable declaration from a file
+        """
+        var = params[0]
+        command.cooker.removeConfigurationVar(var)
+    removeVarFile.needconfig = False
+
+    def createConfigFile(self, command, params):
+        """
+        Create an extra configuration file
+        """
+        name = params[0]
+        command.cooker.createConfigFile(name)
+    createConfigFile.needconfig = False
+
+    def setEventMask(self, command, params):
+        handlerNum = params[0]
+        llevel = params[1]
+        debug_domains = params[2]
+        mask = params[3]
+        return bb.event.set_UIHmask(handlerNum, llevel, debug_domains, mask)
+    setEventMask.needconfig = False
+    setEventMask.readonly = True
+
+    def setFeatures(self, command, params):
+        """
+        Set the cooker features to include the passed list of features
+        """
+        features = params[0]
+        command.cooker.setFeatures(features)
+    setFeatures.needconfig = False
+    # although we change the internal state of the cooker, this is transparent since
+    # we always take and leave the cooker in state.initial
+    setFeatures.readonly = True
+
+    def updateConfig(self, command, params):
+        options = params[0]
+        environment = params[1]
+        command.cooker.updateConfigOpts(options, environment)
+    updateConfig.needconfig = False
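
The readonly/needconfig attributes attached to the methods above form a small convention that runCommand() checks before dispatching. A hypothetical extra command following that convention (not part of this patch) could look like this inside CommandsSync:

    def getBBVersion(self, command, params):
        """
        Return the running BitBake version string
        """
        return bb.__version__
    getBBVersion.readonly = True      # permitted when the server runs in read-only mode
    getBBVersion.needconfig = False   # does not need the configuration/cache to be up to date
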
+
+class CommandsAsync:
+    """
+    A class of asynchronous commands
+    These functions communicate via generated events.
+    Any function that requires metadata parsing should be here.
+    """
+
+    def buildFile(self, command, params):
+        """
+        Build a single specified .bb file
+        """
+        bfile = params[0]
+        task = params[1]
+
+        command.cooker.buildFile(bfile, task)
+    buildFile.needcache = False
+
+    def buildTargets(self, command, params):
+        """
+        Build a set of targets
+        """
+        pkgs_to_build = params[0]
+        task = params[1]
+
+        command.cooker.buildTargets(pkgs_to_build, task)
+    buildTargets.needcache = True
+
+    def generateDepTreeEvent(self, command, params):
+        """
+        Generate an event containing the dependency information
+        """
+        pkgs_to_build = params[0]
+        task = params[1]
+
+        command.cooker.generateDepTreeEvent(pkgs_to_build, task)
+        command.finishAsyncCommand()
+    generateDepTreeEvent.needcache = True
+
+    def generateDotGraph(self, command, params):
+        """
+        Dump dependency information to disk as .dot files
+        """
+        pkgs_to_build = params[0]
+        task = params[1]
+
+        command.cooker.generateDotGraphFiles(pkgs_to_build, task)
+        command.finishAsyncCommand()
+    generateDotGraph.needcache = True
+
+    def generateTargetsTree(self, command, params):
+        """
+        Generate a tree of buildable targets.
+        If klass is provided ensure all recipes that inherit the class are
+        included in the package list.
+        If pkg_list provided use that list (plus any extras brought in by
+        klass) rather than generating a tree for all packages.
+        """
+        klass = params[0]
+        pkg_list = params[1]
+
+        command.cooker.generateTargetsTree(klass, pkg_list)
+        command.finishAsyncCommand()
+    generateTargetsTree.needcache = True
+
+    def findCoreBaseFiles(self, command, params):
+        """
+        Find certain files in COREBASE directory. i.e. Layers
+        """
+        subdir = params[0]
+        filename = params[1]
+
+        command.cooker.findCoreBaseFiles(subdir, filename)
+        command.finishAsyncCommand()
+    findCoreBaseFiles.needcache = False
+
+    def findConfigFiles(self, command, params):
+        """
+        Find config files which provide appropriate values
+        for the passed configuration variable. i.e. MACHINE
+        """
+        varname = params[0]
+
+        command.cooker.findConfigFiles(varname)
+        command.finishAsyncCommand()
+    findConfigFiles.needcache = False
+
+    def findFilesMatchingInDir(self, command, params):
+        """
+        Find implementation files matching the specified pattern
+        in the requested subdirectory of a BBPATH
+        """
+        pattern = params[0]
+        directory = params[1]
+
+        command.cooker.findFilesMatchingInDir(pattern, directory)
+        command.finishAsyncCommand()
+    findFilesMatchingInDir.needcache = False
+
+    def findConfigFilePath(self, command, params):
+        """
+        Find the path of the requested configuration file
+        """
+        configfile = params[0]
+
+        command.cooker.findConfigFilePath(configfile)
+        command.finishAsyncCommand()
+    findConfigFilePath.needcache = False
+
+    def showVersions(self, command, params):
+        """
+        Show the currently selected versions
+        """
+        command.cooker.showVersions()
+        command.finishAsyncCommand()
+    showVersions.needcache = True
+
+    def showEnvironmentTarget(self, command, params):
+        """
+        Print the environment of a target recipe
+        (needs the cache to work out which recipe to use)
+        """
+        pkg = params[0]
+
+        command.cooker.showEnvironment(None, pkg)
+        command.finishAsyncCommand()
+    showEnvironmentTarget.needcache = True
+
+    def showEnvironment(self, command, params):
+        """
+        Print the standard environment
+        or if specified the environment for a specified recipe
+        """
+        bfile = params[0]
+
+        command.cooker.showEnvironment(bfile)
+        command.finishAsyncCommand()
+    showEnvironment.needcache = False
+
+    def parseFiles(self, command, params):
+        """
+        Parse the .bb files
+        """
+        command.cooker.updateCache()
+        command.finishAsyncCommand()
+    parseFiles.needcache = True
+
+    def compareRevisions(self, command, params):
+        """
+        Compare the source revisions known to the fetchers and finish with exit code 1 if they differ
+        """
+        if bb.fetch.fetcher_compare_revisions(command.cooker.data):
+            command.finishAsyncCommand(code=1)
+        else:
+            command.finishAsyncCommand()
+    compareRevisions.needcache = True
+
+    def triggerEvent(self, command, params):
+        """
+        Trigger a certain event
+        """
+        event = params[0]
+        bb.event.fire(eval(event), command.cooker.data)
+        command.currentAsyncCommand = None
+    triggerEvent.needcache = False
+
+    def resetCooker(self, command, params):
+        """
+        Reset the cooker to its initial state, thus forcing a reparse for
+        any async command that has the needcache property set to True
+        """
+        command.cooker.reset()
+        command.finishAsyncCommand()
+    resetCooker.needcache = False
+

+ 6 - 0
bitbake/lib/bb/compat.py

@@ -0,0 +1,6 @@
+"""Code pulled from future python versions, here for compatibility"""
+
+from collections import MutableMapping, KeysView, ValuesView, ItemsView, OrderedDict
+from functools import total_ordering
+
+

+ 2188 - 0
bitbake/lib/bb/cooker.py

@@ -0,0 +1,2188 @@
+#!/usr/bin/env python
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Copyright (C) 2003, 2004  Chris Larson
+# Copyright (C) 2003, 2004  Phil Blundell
+# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
+# Copyright (C) 2005        Holger Hans Peter Freyther
+# Copyright (C) 2005        ROAD GmbH
+# Copyright (C) 2006 - 2007 Richard Purdie
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+from __future__ import print_function
+import sys, os, glob, os.path, re, time
+import atexit
+import itertools
+import logging
+import multiprocessing
+import sre_constants
+import threading
+from cStringIO import StringIO
+from contextlib import closing
+from functools import wraps
+from collections import defaultdict
+import bb, bb.exceptions, bb.command
+from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
+import Queue
+import signal
+import subprocess
+import errno
+import prserv.serv
+import pyinotify
+
+logger      = logging.getLogger("BitBake")
+collectlog  = logging.getLogger("BitBake.Collection")
+buildlog    = logging.getLogger("BitBake.Build")
+parselog    = logging.getLogger("BitBake.Parsing")
+providerlog = logging.getLogger("BitBake.Provider")
+
+class NoSpecificMatch(bb.BBHandledException):
+    """
+    Exception raised when no or multiple file matches are found
+    """
+
+class NothingToBuild(Exception):
+    """
+    Exception raised when there is nothing to build
+    """
+
+class CollectionError(bb.BBHandledException):
+    """
+    Exception raised when layer configuration is incorrect
+    """
+
+class state:
+    initial, parsing, running, shutdown, forceshutdown, stopped, error = range(7)
+
+    @classmethod
+    def get_name(cls, code):
+        for name in dir(cls):
+            value = getattr(cls, name)
+            if type(value) == type(cls.initial) and value == code:
+                return name
+        raise ValueError("Invalid status code: %s" % code)
+
+
+class SkippedPackage:
+    def __init__(self, info = None, reason = None):
+        self.pn = None
+        self.skipreason = None
+        self.provides = None
+        self.rprovides = None
+
+        if info:
+            self.pn = info.pn
+            self.skipreason = info.skipreason
+            self.provides = info.provides
+            self.rprovides = info.rprovides
+        elif reason:
+            self.skipreason = reason
+
+
+class CookerFeatures(object):
+    _feature_list = [HOB_EXTRA_CACHES, SEND_DEPENDS_TREE, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = range(4)
+
+    def __init__(self):
+        self._features=set()
+
+    def setFeature(self, f):
+        # validate we got a request for a feature we support
+        if f not in CookerFeatures._feature_list:
+            return
+        self._features.add(f)
+
+    def __contains__(self, f):
+        return f in self._features
+
+    def __iter__(self):
+        return self._features.__iter__()
+
+    def next(self):
+        return self._features.next()
+
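# Illustrative usage sketch (the configuration object is assumed, not shown):
# a front end that wants extra data from the cooker passes the corresponding
# feature indices when constructing it, e.g.
#
#   features = [CookerFeatures.SEND_DEPENDS_TREE, CookerFeatures.HOB_EXTRA_CACHES]
#   cooker = BBCooker(configuration, featureSet=features)
#   assert CookerFeatures.SEND_DEPENDS_TREE in cooker.featureset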
+
+#============================================================================#
+# BBCooker
+#============================================================================#
+class BBCooker:
+    """
+    Manages one bitbake build run
+    """
+
+    def __init__(self, configuration, featureSet=None):
+        self.recipecache = None
+        self.skiplist = {}
+        self.featureset = CookerFeatures()
+        if featureSet:
+            for f in featureSet:
+                self.featureset.setFeature(f)
+
+        self.configuration = configuration
+
+        self.configwatcher = pyinotify.WatchManager()
+        self.configwatcher.bbseen = []
+        self.configwatcher.bbwatchedfiles = []
+        self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
+        self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
+                         pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
+                         pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO 
+        self.watcher = pyinotify.WatchManager()
+        self.watcher.bbseen = []
+        self.watcher.bbwatchedfiles = []
+        self.notifier = pyinotify.Notifier(self.watcher, self.notifications)
+
+        # If being called by something like tinfoil, we need to clean cached data 
+        # which may now be invalid
+        bb.parse.__mtime_cache = {}
+        bb.parse.BBHandler.cached_statements = {}
+
+        self.initConfigurationData()
+
+        self.inotify_modified_files = []
+
+        def _process_inotify_updates(server, notifier_list, abort):
+            for n in notifier_list:
+                if n.check_events(timeout=0):
+                    # read notified events and enqueue them
+                    n.read_events()
+                    n.process_events()
+            return 1.0
+
+        self.configuration.server_register_idlecallback(_process_inotify_updates, [self.confignotifier, self.notifier])
+
+        self.baseconfig_valid = True
+        self.parsecache_valid = False
+
+        # Take a lock so only one copy of bitbake can run against a given build
+        # directory at a time
+        if not self.lockBitbake():
+            bb.fatal("Only one copy of bitbake should be run against a build directory")
+        try:
+            self.lock.seek(0)
+            self.lock.truncate()
+            if len(configuration.interface) >= 2:
+                self.lock.write("%s:%s\n" % (configuration.interface[0], configuration.interface[1]));
+            self.lock.flush()
+        except:
+            pass
+
+        # TOSTOP must not be set or our children will hang when they output
+        fd = sys.stdout.fileno()
+        if os.isatty(fd):
+            import termios
+            tcattr = termios.tcgetattr(fd)
+            if tcattr[3] & termios.TOSTOP:
+                buildlog.info("The terminal had the TOSTOP bit set, clearing...")
+                tcattr[3] = tcattr[3] & ~termios.TOSTOP
+                termios.tcsetattr(fd, termios.TCSANOW, tcattr)
+
+        self.command = bb.command.Command(self)
+        self.state = state.initial
+
+        self.parser = None
+
+        signal.signal(signal.SIGTERM, self.sigterm_exception)
+        # Let SIGHUP exit as SIGTERM
+        signal.signal(signal.SIGHUP, self.sigterm_exception)
+
+    def config_notifications(self, event):
+        if not event.pathname in self.configwatcher.bbwatchedfiles:
+            return
+        if not event.pathname in self.inotify_modified_files:
+            self.inotify_modified_files.append(event.pathname)
+        self.baseconfig_valid = False
+
+    def notifications(self, event):
+        if not event.pathname in self.inotify_modified_files:
+            self.inotify_modified_files.append(event.pathname)
+        self.parsecache_valid = False
+
+    def add_filewatch(self, deps, watcher=None):
+        if not watcher:
+            watcher = self.watcher
+        for i in deps:
+            watcher.bbwatchedfiles.append(i[0])
+            f = os.path.dirname(i[0])
+            if f in watcher.bbseen:
+                continue
+            watcher.bbseen.append(f)
+            watchtarget = None
+            while True:
+                # We try to add watches for files that don't exist yet but would influence
+                # the parser if they did. The parent directory of these files may not exist
+                # either, in which case we need to watch the nearest existing parent for changes.
+                try:
+                    watcher.add_watch(f, self.watchmask, quiet=False)
+                    if watchtarget:
+                        watcher.bbwatchedfiles.append(watchtarget)
+                    break
+                except pyinotify.WatchManagerError as e:
+                    if 'ENOENT' in str(e):
+                        watchtarget = f
+                        f = os.path.dirname(f)
+                        if f in watcher.bbseen:
+                            break
+                        watcher.bbseen.append(f)
+                        continue
+                    if 'ENOSPC' in str(e):
+                        providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
+                        providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
+                        providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
+                        providerlog.error("Root privilege is required to modify max_user_watches.")
+                    raise
+
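# Illustrative example of the fallback above: asked to watch
# <builddir>/conf/auto.conf before it exists, the loop first tries to watch
# <builddir>/conf; if that directory is missing too, it walks up and watches
# <builddir> itself, recording the missing path so that its later creation
# still produces a notification for the watched file.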
+    def sigterm_exception(self, signum, stackframe):
+        if signum == signal.SIGTERM:
+            bb.warn("Cooker received SIGTERM, shutting down...")
+        elif signum == signal.SIGHUP:
+            bb.warn("Cooker received SIGHUP, shutting down...")
+        self.state = state.forceshutdown
+
+    def setFeatures(self, features):
+        # we only accept a new feature set if we're in state initial, so we can reset without problems
+        if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
+            raise Exception("Illegal state for feature set change")
+        original_featureset = list(self.featureset)
+        for feature in features:
+            self.featureset.setFeature(feature)
+        bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
+        if (original_featureset != list(self.featureset)) and self.state != state.error:
+            self.reset()
+
+    def initConfigurationData(self):
+
+        self.state = state.initial
+        self.caches_array = []
+
+        # Need to preserve BB_CONSOLELOG over resets
+        consolelog = None
+        if hasattr(self, "data"):
+            consolelog = self.data.getVar("BB_CONSOLELOG", True)
+
+        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
+            self.enableDataTracking()
+
+        all_extra_cache_names = []
+        # We hardcode all known cache types in a single place, here.
+        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
+            all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")
+
+        caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names
+
+        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
+        # This is the entry point, no further check needed!
+        for var in caches_name_array:
+            try:
+                module_name, cache_name = var.split(':')
+                module = __import__(module_name, fromlist=(cache_name,))
+                self.caches_array.append(getattr(module, cache_name))
+            except ImportError as exc:
+                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
+                sys.exit("FATAL: Failed to import extra cache class '%s'." % cache_name)
+
+        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
+        self.databuilder.parseBaseConfiguration()
+        self.data = self.databuilder.data
+        self.data_hash = self.databuilder.data_hash
+
+        if consolelog:
+            self.data.setVar("BB_CONSOLELOG", consolelog)
+
+        # we log all events to a file if so directed
+        if self.configuration.writeeventlog:
+            import json, pickle
+            DEFAULT_EVENTFILE = self.configuration.writeeventlog
+            class EventLogWriteHandler():
+
+                class EventWriter():
+                    def __init__(self, cooker):
+                        self.file_inited = None
+                        self.cooker = cooker
+                        self.event_queue = []
+
+                    def init_file(self):
+                        try:
+                            # delete the old log
+                            os.remove(DEFAULT_EVENTFILE)
+                        except:
+                            pass
+
+                        # write current configuration data
+                        with open(DEFAULT_EVENTFILE, "w") as f:
+                            f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))
+
+                    def write_event(self, event):
+                        with open(DEFAULT_EVENTFILE, "a") as f:
+                            try:
+                                f.write("%s\n" % json.dumps({"class":event.__module__ + "." + event.__class__.__name__, "vars":json.dumps(pickle.dumps(event)) }))
+                            except Exception as e:
+                                import traceback
+                                print(e, traceback.format_exc(e))
+
+
+                    def send(self, event):
+                        event_class = event.__module__ + "." + event.__class__.__name__
+
+                        # init on bb.event.BuildStarted
+                        if self.file_inited is None:
+                            if  event_class == "bb.event.BuildStarted":
+                                self.init_file()
+                                self.file_inited = True
+
+                                # write pending events
+                                for e in self.event_queue:
+                                    self.write_event(e)
+
+                                # also write the current event
+                                self.write_event(event)
+
+                            else:
+                                # queue all events until the file is inited
+                                self.event_queue.append(event)
+
+                        else:
+                            # we have the file, just write the event
+                            self.write_event(event)
+
+                # set our handler's event processor
+                event = EventWriter(self)       # self is the cooker here
+
+
+            # set up cooker features for this mock UI handler
+
+            # we need to write the dependency tree in the log
+            self.featureset.setFeature(CookerFeatures.SEND_DEPENDS_TREE)
+            # register the log file writer as UI Handler
+            bb.event.register_UIHhandler(EventLogWriteHandler())
+
+
+        #
+        # Copy of the data store which has been expanded.
+        # Used for firing events and accessing variables where expansion needs to be accounted for
+        #
+        self.expanded_data = bb.data.createCopy(self.data)
+        bb.data.update_data(self.expanded_data)
+        bb.parse.init_parser(self.expanded_data)
+
+        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
+            self.disableDataTracking()
+
+        self.data.renameVar("__depends", "__base_depends")
+        self.add_filewatch(self.data.getVar("__base_depends", False), self.configwatcher)
+
+
+    def enableDataTracking(self):
+        self.configuration.tracking = True
+        if hasattr(self, "data"):
+            self.data.enableTracking()
+
+    def disableDataTracking(self):
+        self.configuration.tracking = False
+        if hasattr(self, "data"):
+            self.data.disableTracking()
+
+    def modifyConfigurationVar(self, var, val, default_file, op):
+        if op == "append":
+            self.appendConfigurationVar(var, val, default_file)
+        elif op == "set":
+            self.saveConfigurationVar(var, val, default_file, "=")
+        elif op == "earlyAssign":
+            self.saveConfigurationVar(var, val, default_file, "?=")
+
+
+    def appendConfigurationVar(self, var, val, default_file):
+        #add append var operation to the end of default_file
+        default_file = bb.cookerdata.findConfigFile(default_file, self.data)
+
+        total = "#added by hob"
+        total += "\n%s += \"%s\"\n" % (var, val)
+
+        with open(default_file, 'a') as f:
+            f.write(total)
+
+        #add to history
+        loginfo = {"op":"append", "file":default_file, "line":total.count("\n")}
+        self.data.appendVar(var, val, **loginfo)
+
+    def saveConfigurationVar(self, var, val, default_file, op):
+
+        replaced = False
+        #do not save if nothing changed
+        if str(val) == self.data.getVar(var, False):
+            return
+
+        conf_files = self.data.varhistory.get_variable_files(var)
+
+        #format the value when it is a list
+        if isinstance(val, list):
+            listval = ""
+            for value in val:
+                listval += "%s   " % value
+            val = listval
+
+        topdir = self.data.getVar("TOPDIR", False)
+
+        #comment out or replace previous assignments of var
+        for conf_file in conf_files:
+            if topdir in conf_file:
+                with open(conf_file, 'r') as f:
+                    contents = f.readlines()
+
+                lines = self.data.varhistory.get_variable_lines(var, conf_file)
+                for line in lines:
+                    total = ""
+                    i = 0
+                    for c in contents:
+                        total += c
+                        i = i + 1
+                        if i==int(line):
+                            end_index = len(total)
+                    index = total.rfind(var, 0, end_index)
+
+                    begin_line = total.count("\n",0,index)
+                    end_line = int(line)
+
+                    #check if the variable was previously saved by hob in the same way;
+                    #if so, replace the line where it was declared,
+                    #otherwise comment out the old assignment
+                    if contents[begin_line-1]== "#added by hob\n":
+                        contents[begin_line] = "%s %s \"%s\"\n" % (var, op, val)
+                        replaced = True
+                    else:
+                        for ii in range(begin_line, end_line):
+                            contents[ii] = "#" + contents[ii]
+
+                with open(conf_file, 'w') as f:
+                    f.writelines(contents)
+
+        if replaced == False:
+            #remove var from history
+            self.data.varhistory.del_var_history(var)
+
+            #add var to the end of default_file
+            default_file = bb.cookerdata.findConfigFile(default_file, self.data)
+
+            #add the variable on a single line, so it is easy to replace the next time
+            total = "\n#added by hob"
+            total += "\n%s %s \"%s\"\n" % (var, op, val)
+
+            with open(default_file, 'a') as f:
+                f.write(total)
+
+            #add to history
+            loginfo = {"op":"set", "file":default_file, "line":total.count("\n")}
+            self.data.setVar(var, val, **loginfo)
+
+    def removeConfigurationVar(self, var):
+        conf_files = self.data.varhistory.get_variable_files(var)
+        topdir = self.data.getVar("TOPDIR", False)
+
+        for conf_file in conf_files:
+            if topdir in conf_file:
+                with open(conf_file, 'r') as f:
+                    contents = f.readlines()
+
+                lines = self.data.varhistory.get_variable_lines(var, conf_file)
+                for line in lines:
+                    total = ""
+                    i = 0
+                    for c in contents:
+                        total += c
+                        i = i + 1
+                        if i==int(line):
+                            end_index = len(total)
+                    index = total.rfind(var, 0, end_index)
+
+                    begin_line = total.count("\n",0,index)
+
+                    #check if the variable was saved before in the same way
+                    if contents[begin_line-1]== "#added by hob\n":
+                        contents[begin_line-1] = contents[begin_line] = "\n"
+                    else:
+                        contents[begin_line] = "\n"
+                    #remove var from history
+                    self.data.varhistory.del_var_history(var, conf_file, line)
+                    #remove variable
+                    self.data.delVar(var)
+
+                with open(conf_file, 'w') as f:
+                    f.writelines(contents)
+
+    def createConfigFile(self, name):
+        path = os.getcwd()
+        confpath = os.path.join(path, "conf", name)
+        open(confpath, 'w').close()
+
+    def parseConfiguration(self):
+        # Set log file verbosity
+        verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", False))
+        if verboselogs:
+            bb.msg.loggerVerboseLogs = True
+
+        # Change nice level if we're asked to
+        nice = self.data.getVar("BB_NICE_LEVEL", True)
+        if nice:
+            curnice = os.nice(0)
+            nice = int(nice) - curnice
+            buildlog.verbose("Renice to %s " % os.nice(nice))
+
+        if self.recipecache:
+            del self.recipecache
+        self.recipecache = bb.cache.CacheData(self.caches_array)
+
+        self.handleCollections( self.data.getVar("BBFILE_COLLECTIONS", True) )
+
+    def updateConfigOpts(self, options, environment):
+        clean = True
+        for o in options:
+            if o in ['prefile', 'postfile']:
+                clean = False
+                server_val = getattr(self.configuration, "%s_server" % o)
+                if not options[o] and server_val:
+                    # restore value provided on server start
+                    setattr(self.configuration, o, server_val)
+                    continue
+            setattr(self.configuration, o, options[o])
+        for k in bb.utils.approved_variables():
+            if k in environment and k not in self.configuration.env:
+                logger.debug(1, "Updating environment variable %s to %s" % (k, environment[k]))
+                self.configuration.env[k] = environment[k]
+                clean = False
+            if k in self.configuration.env and k not in environment:
+                logger.debug(1, "Updating environment variable %s (deleted)" % (k))
+                del self.configuration.env[k]
+                clean = False
+            if k not in self.configuration.env and k not in environment:
+                 continue
+            if environment[k] != self.configuration.env[k]:
+                logger.debug(1, "Updating environment variable %s to %s" % (k, environment[k]))
+                self.configuration.env[k] = environment[k]
+                clean = False
+        if not clean:
+            logger.debug(1, "Base environment change, triggering reparse")
+            self.baseconfig_valid = False        
+            self.reset()
+
+    def runCommands(self, server, data, abort):
+        """
+        Run any queued asynchronous command
+        This is done by the idle handler so it runs in the server's own context
+        rather than being tied to any particular UI.
+        """
+
+        return self.command.runAsyncCommand()
+
+    def showVersions(self):
+
+        pkg_pn = self.recipecache.pkg_pn
+        (latest_versions, preferred_versions) = bb.providers.findProviders(self.data, self.recipecache, pkg_pn)
+
+        logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
+        logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")
+
+        for p in sorted(pkg_pn):
+            pref = preferred_versions[p]
+            latest = latest_versions[p]
+
+            prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
+            lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]
+
+            if pref == latest:
+                prefstr = ""
+
+            logger.plain("%-35s %25s %25s", p, lateststr, prefstr)
+
+    def showEnvironment(self, buildfile=None, pkgs_to_build=None):
+        """
+        Show the outer or per-recipe environment
+        """
+        fn = None
+        envdata = None
+        if not pkgs_to_build:
+            pkgs_to_build = []
+
+        if buildfile:
+            # Parse the configuration here. We need to do it explicitly here since
+            # this showEnvironment() code path doesn't use the cache
+            self.parseConfiguration()
+
+            fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile)
+            fn = self.matchFile(fn)
+            fn = bb.cache.Cache.realfn2virtual(fn, cls)
+        elif len(pkgs_to_build) == 1:
+            ignore = self.expanded_data.getVar("ASSUME_PROVIDED", True) or ""
+            if pkgs_to_build[0] in set(ignore.split()):
+                bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])
+
+            taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)
+
+            targetid = taskdata.getbuild_id(pkgs_to_build[0])
+            fnid = taskdata.build_targets[targetid][0]
+            fn = taskdata.fn_index[fnid]
+        else:
+            envdata = self.data
+
+        if fn:
+            try:
+                envdata = bb.cache.Cache.loadDataFull(fn, self.collection.get_file_appends(fn), self.data)
+            except Exception as e:
+                parselog.exception("Unable to read %s", fn)
+                raise
+
+        # Display history
+        with closing(StringIO()) as env:
+            self.data.inchistory.emit(env)
+            logger.plain(env.getvalue())
+
+        # emit variables and shell functions
+        data.update_data(envdata)
+        with closing(StringIO()) as env:
+            data.emit_env(env, envdata, True)
+            logger.plain(env.getvalue())
+
+        # emit the metadata which isn't valid shell
+        data.expandKeys(envdata)
+        for e in envdata.keys():
+            if data.getVarFlag( e, 'python', envdata ):
+                logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))
+
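# In the standard command line UI this is the code path behind the
# -e/--environment option: "bitbake -e" dumps the global datastore, while
# "bitbake -e <recipe>" resolves the recipe through the cache and dumps that
# recipe's expanded environment.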
+
+    def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False):
+        """
+        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
+        """
+        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)
+
+        # A task of None means use the default task
+        if task is None:
+            task = self.configuration.cmd
+
+        fulltargetlist = self.checkPackages(pkgs_to_build)
+
+        localdata = data.createCopy(self.data)
+        bb.data.update_data(localdata)
+        bb.data.expandKeys(localdata)
+        taskdata = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)
+
+        current = 0
+        runlist = []
+        for k in fulltargetlist:
+            ktask = task
+            if ":do_" in k:
+                k2 = k.split(":do_")
+                k = k2[0]
+                ktask = k2[1]
+            taskdata.add_provider(localdata, self.recipecache, k)
+            current += 1
+            if not ktask.startswith("do_"):
+                ktask = "do_%s" % ktask
+            runlist.append([k, ktask])
+            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)
+        taskdata.add_unresolved(localdata, self.recipecache)
+        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
+        return taskdata, runlist, fulltargetlist
+
+    def prepareTreeData(self, pkgs_to_build, task):
+        """
+        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
+        """
+
+        # We set abort to False here to prevent unbuildable targets raising
+        # an exception when we're just generating data
+        taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)
+
+        return runlist, taskdata
+
+    ######## WARNING : this function requires cache_extra to be enabled ########
+
+    def generateTaskDepTreeData(self, pkgs_to_build, task):
+        """
+        Create a dependency graph of pkgs_to_build including reverse dependency
+        information.
+        """
+        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
+        rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)
+        rq.rqdata.prepare()
+        return self.buildDependTree(rq, taskdata)
+
+
+    def buildDependTree(self, rq, taskdata):
+        seen_fnids = []
+        depend_tree = {}
+        depend_tree["depends"] = {}
+        depend_tree["tdepends"] = {}
+        depend_tree["pn"] = {}
+        depend_tree["rdepends-pn"] = {}
+        depend_tree["packages"] = {}
+        depend_tree["rdepends-pkg"] = {}
+        depend_tree["rrecs-pkg"] = {}
+        depend_tree['providermap'] = {}
+        depend_tree["layer-priorities"] = self.recipecache.bbfile_config_priorities
+
+        for name, fn in taskdata.get_providermap().iteritems():
+            pn = self.recipecache.pkg_fn[fn]
+            if name != pn:
+                version = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn]
+                depend_tree['providermap'][name] = (pn, version)
+
+        for task in xrange(len(rq.rqdata.runq_fnid)):
+            taskname = rq.rqdata.runq_task[task]
+            fnid = rq.rqdata.runq_fnid[task]
+            fn = taskdata.fn_index[fnid]
+            pn = self.recipecache.pkg_fn[fn]
+            version  = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn]
+            if pn not in depend_tree["pn"]:
+                depend_tree["pn"][pn] = {}
+                depend_tree["pn"][pn]["filename"] = fn
+                depend_tree["pn"][pn]["version"] = version
+                depend_tree["pn"][pn]["inherits"] = self.recipecache.inherits.get(fn, None)
+
+                # if we have extra caches, list all attributes they bring in
+                extra_info = []
+                for cache_class in self.caches_array:
+                    if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
+                        cachefields = getattr(cache_class, 'cachefields', [])
+                        extra_info = extra_info + cachefields
+
+                # for all attributes stored, add them to the dependency tree
+                for ei in extra_info:
+                    depend_tree["pn"][pn][ei] = vars(self.recipecache)[ei][fn]
+
+
+            for dep in rq.rqdata.runq_depends[task]:
+                depfn = taskdata.fn_index[rq.rqdata.runq_fnid[dep]]
+                deppn = self.recipecache.pkg_fn[depfn]
+                dotname = "%s.%s" % (pn, rq.rqdata.runq_task[task])
+                if not dotname in depend_tree["tdepends"]:
+                    depend_tree["tdepends"][dotname] = []
+                depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, rq.rqdata.runq_task[dep]))
+            if fnid not in seen_fnids:
+                seen_fnids.append(fnid)
+                packages = []
+
+                depend_tree["depends"][pn] = []
+                for dep in taskdata.depids[fnid]:
+                    depend_tree["depends"][pn].append(taskdata.build_names_index[dep])
+
+                depend_tree["rdepends-pn"][pn] = []
+                for rdep in taskdata.rdepids[fnid]:
+                    depend_tree["rdepends-pn"][pn].append(taskdata.run_names_index[rdep])
+
+                rdepends = self.recipecache.rundeps[fn]
+                for package in rdepends:
+                    depend_tree["rdepends-pkg"][package] = []
+                    for rdepend in rdepends[package]:
+                        depend_tree["rdepends-pkg"][package].append(rdepend)
+                    packages.append(package)
+
+                rrecs = self.recipecache.runrecs[fn]
+                for package in rrecs:
+                    depend_tree["rrecs-pkg"][package] = []
+                    for rdepend in rrecs[package]:
+                        depend_tree["rrecs-pkg"][package].append(rdepend)
+                    if not package in packages:
+                        packages.append(package)
+
+                for package in packages:
+                    if package not in depend_tree["packages"]:
+                        depend_tree["packages"][package] = {}
+                        depend_tree["packages"][package]["pn"] = pn
+                        depend_tree["packages"][package]["filename"] = fn
+                        depend_tree["packages"][package]["version"] = version
+
+        return depend_tree
+
+    ######## WARNING : this function requires cache_extra to be enabled ########
+    def generatePkgDepTreeData(self, pkgs_to_build, task):
+        """
+        Create a dependency tree of pkgs_to_build, returning the data.
+        """
+        _, taskdata = self.prepareTreeData(pkgs_to_build, task)
+        tasks_fnid = []
+        if len(taskdata.tasks_name) != 0:
+            for task in xrange(len(taskdata.tasks_name)):
+                tasks_fnid.append(taskdata.tasks_fnid[task])
+
+        seen_fnids = []
+        depend_tree = {}
+        depend_tree["depends"] = {}
+        depend_tree["pn"] = {}
+        depend_tree["rdepends-pn"] = {}
+        depend_tree["rdepends-pkg"] = {}
+        depend_tree["rrecs-pkg"] = {}
+
+        # if we have extra caches, list all attributes they bring in
+        extra_info = []
+        for cache_class in self.caches_array:
+            if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
+                cachefields = getattr(cache_class, 'cachefields', [])
+                extra_info = extra_info + cachefields
+
+        for task in xrange(len(tasks_fnid)):
+            fnid = tasks_fnid[task]
+            fn = taskdata.fn_index[fnid]
+            pn = self.recipecache.pkg_fn[fn]
+
+            if pn not in depend_tree["pn"]:
+                depend_tree["pn"][pn] = {}
+                depend_tree["pn"][pn]["filename"] = fn
+                version  = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn]
+                depend_tree["pn"][pn]["version"] = version
+                rdepends = self.recipecache.rundeps[fn]
+                rrecs = self.recipecache.runrecs[fn]
+                depend_tree["pn"][pn]["inherits"] = self.recipecache.inherits.get(fn, None)
+
+                # for all extra attributes stored, add them to the dependency tree
+                for ei in extra_info:
+                    depend_tree["pn"][pn][ei] = vars(self.recipecache)[ei][fn]
+
+            if fnid not in seen_fnids:
+                seen_fnids.append(fnid)
+
+                depend_tree["depends"][pn] = []
+                for dep in taskdata.depids[fnid]:
+                    item = taskdata.build_names_index[dep]
+                    pn_provider = ""
+                    targetid = taskdata.getbuild_id(item)
+                    if targetid in taskdata.build_targets and taskdata.build_targets[targetid]:
+                        id = taskdata.build_targets[targetid][0]
+                        fn_provider = taskdata.fn_index[id]
+                        pn_provider = self.recipecache.pkg_fn[fn_provider]
+                    else:
+                        pn_provider = item
+                    depend_tree["depends"][pn].append(pn_provider)
+
+                depend_tree["rdepends-pn"][pn] = []
+                for rdep in taskdata.rdepids[fnid]:
+                    item = taskdata.run_names_index[rdep]
+                    pn_rprovider = ""
+                    targetid = taskdata.getrun_id(item)
+                    if targetid in taskdata.run_targets and taskdata.run_targets[targetid]:
+                        id = taskdata.run_targets[targetid][0]
+                        fn_rprovider = taskdata.fn_index[id]
+                        pn_rprovider = self.recipecache.pkg_fn[fn_rprovider]
+                    else:
+                        pn_rprovider = item
+                    depend_tree["rdepends-pn"][pn].append(pn_rprovider)
+
+                depend_tree["rdepends-pkg"].update(rdepends)
+                depend_tree["rrecs-pkg"].update(rrecs)
+
+        return depend_tree
+
+    def generateDepTreeEvent(self, pkgs_to_build, task):
+        """
+        Create a task dependency graph of pkgs_to_build.
+        Generate an event with the result
+        """
+        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
+        bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)
+
+    def generateDotGraphFiles(self, pkgs_to_build, task):
+        """
+        Create a task dependency graph of pkgs_to_build.
+        Save the result to a set of .dot files.
+        """
+
+        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
+
+        # Prints a flattened form of package-depends below where subpackages of a package are merged into the main pn
+        depends_file = file('pn-depends.dot', 'w' )
+        buildlist_file = file('pn-buildlist', 'w' )
+        print("digraph depends {", file=depends_file)
+        for pn in depgraph["pn"]:
+            fn = depgraph["pn"][pn]["filename"]
+            version = depgraph["pn"][pn]["version"]
+            print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
+            print("%s" % pn, file=buildlist_file)
+        buildlist_file.close()
+        logger.info("PN build list saved to 'pn-buildlist'")
+        for pn in depgraph["depends"]:
+            for depend in depgraph["depends"][pn]:
+                print('"%s" -> "%s" [style=solid]' % (pn, depend), file=depends_file)
+        for pn in depgraph["rdepends-pn"]:
+            for rdepend in depgraph["rdepends-pn"][pn]:
+                print('"%s" -> "%s" [style=dashed]' % (pn, rdepend), file=depends_file)
+        print("}", file=depends_file)
+        logger.info("PN dependencies saved to 'pn-depends.dot'")
+
+        depends_file = file('package-depends.dot', 'w' )
+        print("digraph depends {", file=depends_file)
+        for package in depgraph["packages"]:
+            pn = depgraph["packages"][package]["pn"]
+            fn = depgraph["packages"][package]["filename"]
+            version = depgraph["packages"][package]["version"]
+            if package == pn:
+                print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
+            else:
+                print('"%s" [label="%s(%s) %s\\n%s"]' % (package, package, pn, version, fn), file=depends_file)
+            for depend in depgraph["depends"][pn]:
+                print('"%s" -> "%s" [style=solid]' % (package, depend), file=depends_file)
+        for package in depgraph["rdepends-pkg"]:
+            for rdepend in depgraph["rdepends-pkg"][package]:
+                print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
+        for package in depgraph["rrecs-pkg"]:
+            for rdepend in depgraph["rrecs-pkg"][package]:
+                print('"%s" -> "%s" [style=dotted]' % (package, rdepend), file=depends_file)
+        print("}", file=depends_file)
+        logger.info("Package dependencies saved to 'package-depends.dot'")
+
+        tdepends_file = file('task-depends.dot', 'w' )
+        print("digraph depends {", file=tdepends_file)
+        for task in depgraph["tdepends"]:
+            (pn, taskname) = task.rsplit(".", 1)
+            fn = depgraph["pn"][pn]["filename"]
+            version = depgraph["pn"][pn]["version"]
+            print('"%s.%s" [label="%s %s\\n%s\\n%s"]' % (pn, taskname, pn, taskname, version, fn), file=tdepends_file)
+            for dep in depgraph["tdepends"][task]:
+                print('"%s" -> "%s"' % (task, dep), file=tdepends_file)
+        print("}", file=tdepends_file)
+        logger.info("Task dependencies saved to 'task-depends.dot'")
+
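# The .dot files written above are plain Graphviz sources; assuming the
# graphviz tools are installed they can be rendered with, for example:
#
#   dot -Tsvg task-depends.dot -o task-depends.svg
#   dot -Tpng pn-depends.dot -o pn-depends.png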
+    def show_appends_with_no_recipes(self):
+        # Determine which bbappends haven't been applied
+
+        # First get list of recipes, including skipped
+        recipefns = self.recipecache.pkg_fn.keys()
+        recipefns.extend(self.skiplist.keys())
+
+        # Work out list of bbappends that have been applied
+        applied_appends = []
+        for fn in recipefns:
+            applied_appends.extend(self.collection.get_file_appends(fn))
+
+        appends_without_recipes = []
+        for _, appendfn in self.collection.bbappends:
+            if not appendfn in applied_appends:
+                appends_without_recipes.append(appendfn)
+
+        if appends_without_recipes:
+            msg = 'No recipes available for:\n  %s' % '\n  '.join(appends_without_recipes)
+            warn_only = self.data.getVar("BB_DANGLINGAPPENDS_WARNONLY", \
+                 False) or "no"
+            if warn_only.lower() in ("1", "yes", "true"):
+                bb.warn(msg)
+            else:
+                bb.fatal(msg)
+
+    def handlePrefProviders(self):
+
+        localdata = data.createCopy(self.data)
+        bb.data.update_data(localdata)
+        bb.data.expandKeys(localdata)
+
+        # Handle PREFERRED_PROVIDERS
+        for p in (localdata.getVar('PREFERRED_PROVIDERS', True) or "").split():
+            try:
+                (providee, provider) = p.split(':')
+            except:
+                providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
+                continue
+            if providee in self.recipecache.preferred and self.recipecache.preferred[providee] != provider:
+                providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecache.preferred[providee])
+            self.recipecache.preferred[providee] = provider
+
+    def findCoreBaseFiles(self, subdir, configfile):
+        corebase = self.data.getVar('COREBASE', True) or ""
+        paths = []
+        for root, dirs, files in os.walk(corebase + '/' + subdir):
+            for d in dirs:
+                configfilepath = os.path.join(root, d, configfile)
+                if os.path.exists(configfilepath):
+                    paths.append(os.path.join(root, d))
+
+        if paths:
+            bb.event.fire(bb.event.CoreBaseFilesFound(paths), self.data)
+
+    def findConfigFilePath(self, configfile):
+        """
+        Find the location on disk of configfile and, if it exists and was parsed
+        by BitBake, emit the ConfigFilePathFound event with the path to the file.
+        """
+        path = bb.cookerdata.findConfigFile(configfile, self.data)
+        if not path:
+            return
+
+        # Generate a list of parsed configuration files by searching the files
+        # listed in the __depends and __base_depends variables with a .conf suffix.
+        conffiles = []
+        dep_files = self.data.getVar('__base_depends', False) or []
+        dep_files = dep_files + (self.data.getVar('__depends', False) or [])
+
+        for f in dep_files:
+            if f[0].endswith(".conf"):
+                conffiles.append(f[0])
+
+        _, conf, conffile = path.rpartition("conf/")
+        match = os.path.join(conf, conffile)
+        # Try and find matches for conf/conffilename.conf as we don't always
+        # have the full path to the file.
+        for cfg in conffiles:
+            if cfg.endswith(match):
+                bb.event.fire(bb.event.ConfigFilePathFound(path),
+                              self.data)
+                break
+
+    def findFilesMatchingInDir(self, filepattern, directory):
+        """
+        Searches for files containing the substring 'filepattern' which are children of
+        'directory' in each BBPATH. e.g. to find all rootfs package classes available
+        to BitBake one could call findFilesMatchingInDir('rootfs_', 'classes'),
+        or to find all machine configuration files one could call:
+        findFilesMatchingInDir('.conf', 'conf/machine')
+        """
+
+        matches = []
+        bbpaths = self.data.getVar('BBPATH', True).split(':')
+        for path in bbpaths:
+            dirpath = os.path.join(path, directory)
+            if os.path.exists(dirpath):
+                for root, dirs, files in os.walk(dirpath):
+                    for f in files:
+                        if filepattern in f:
+                            matches.append(f)
+
+        if matches:
+            bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
+
+    def findConfigFiles(self, varname):
+        """
+        Find config files which are appropriate values for varname.
+        e.g. MACHINE, DISTRO
+        """
+        possible = []
+        var = varname.lower()
+
+        data = self.data
+        # iterate configs
+        bbpaths = data.getVar('BBPATH', True).split(':')
+        for path in bbpaths:
+            confpath = os.path.join(path, "conf", var)
+            if os.path.exists(confpath):
+                for root, dirs, files in os.walk(confpath):
+                    # get all child files, these are appropriate values
+                    for f in files:
+                        val, sep, end = f.rpartition('.')
+                        if end == 'conf':
+                            possible.append(val)
+
+        if possible:
+            bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)
+
+    def findInheritsClass(self, klass):
+        """
+        Find all recipes which inherit the specified class
+        """
+        pkg_list = []
+
+        for pfn in self.recipecache.pkg_fn:
+            inherits = self.recipecache.inherits.get(pfn, None)
+            if inherits and klass in inherits:
+                pkg_list.append(self.recipecache.pkg_fn[pfn])
+
+        return pkg_list
+
+    def generateTargetsTree(self, klass=None, pkgs=None):
+        """
+        Generate a dependency tree of buildable targets
+        Generate an event with the result
+        """
+        # if the caller hasn't specified a pkgs list default to universe
+        if not pkgs:
+            pkgs = ['universe']
+        # if inherited_class passed ensure all recipes which inherit the
+        # specified class are included in pkgs
+        if klass:
+            extra_pkgs = self.findInheritsClass(klass)
+            pkgs = pkgs + extra_pkgs
+
+        # generate a dependency tree for all our packages
+        tree = self.generatePkgDepTreeData(pkgs, 'build')
+        bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)
+
+    def interactiveMode( self ):
+        """Drop off into a shell"""
+        try:
+            from bb import shell
+        except ImportError:
+            parselog.exception("Interactive mode not available")
+            sys.exit(1)
+        else:
+            shell.start( self )
+
+
+    def handleCollections( self, collections ):
+        """Handle collections"""
+        errors = False
+        self.recipecache.bbfile_config_priorities = []
+        if collections:
+            collection_priorities = {}
+            collection_depends = {}
+            collection_list = collections.split()
+            min_prio = 0
+            for c in collection_list:
+                # Get collection priority if defined explicitly
+                priority = self.data.getVar("BBFILE_PRIORITY_%s" % c, True)
+                if priority:
+                    try:
+                        prio = int(priority)
+                    except ValueError:
+                        parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
+                        errors = True
+                    if min_prio == 0 or prio < min_prio:
+                        min_prio = prio
+                    collection_priorities[c] = prio
+                else:
+                    collection_priorities[c] = None
+
+                # Check dependencies and store information for priority calculation
+                deps = self.data.getVar("LAYERDEPENDS_%s" % c, True)
+                if deps:
+                    try:
+                        deplist = bb.utils.explode_dep_versions2(deps)
+                    except bb.utils.VersionStringException as vse:
+                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
+                    for dep, oplist in deplist.iteritems():
+                        if dep in collection_list:
+                            for opstr in oplist:
+                                layerver = self.data.getVar("LAYERVERSION_%s" % dep, True)
+                                (op, depver) = opstr.split()
+                                if layerver:
+                                    try:
+                                        res = bb.utils.vercmp_string_op(layerver, depver, op)
+                                    except bb.utils.VersionStringException as vse:
+                                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
+                                    if not res:
+                                        parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
+                                        errors = True
+                                else:
+                                    parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
+                                    errors = True
+                        else:
+                            parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
+                            errors = True
+                    collection_depends[c] = deplist.keys()
+                else:
+                    collection_depends[c] = []
+
+            # Recursively work out collection priorities based on dependencies
+            def calc_layer_priority(collection):
+                if not collection_priorities[collection]:
+                    max_depprio = min_prio
+                    for dep in collection_depends[collection]:
+                        calc_layer_priority(dep)
+                        depprio = collection_priorities[dep]
+                        if depprio > max_depprio:
+                            max_depprio = depprio
+                    max_depprio += 1
+                    parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
+                    collection_priorities[collection] = max_depprio
+
+            # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
+            for c in collection_list:
+                calc_layer_priority(c)
+                regex = self.data.getVar("BBFILE_PATTERN_%s" % c, True)
+                if regex == None:
+                    parselog.error("BBFILE_PATTERN_%s not defined" % c)
+                    errors = True
+                    continue
+                try:
+                    cre = re.compile(regex)
+                except re.error:
+                    parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
+                    errors = True
+                    continue
+                self.recipecache.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
+        if errors:
+            # We've already printed the actual error(s)
+            raise CollectionError("Errors during parsing layer configuration")
+
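# Worked example of the priority calculation above (layer names illustrative):
# with BBFILE_PRIORITY_core = "5", LAYERDEPENDS_meta-a = "core",
# LAYERDEPENDS_meta-b = "meta-a", and no explicit priority for meta-a or meta-b,
# calc_layer_priority() assigns meta-a = 6 (core's 5 + 1) and meta-b = 7
# (meta-a's 6 + 1), so layers further down the dependency chain get a higher
# default priority.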
+    def buildSetVars(self):
+        """
+        Setup any variables needed before starting a build
+        """
+        t = time.gmtime() 
+        if not self.data.getVar("BUILDNAME", False):
+            self.data.setVar("BUILDNAME", "${DATE}${TIME}")
+        self.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
+        self.data.setVar("DATE", time.strftime('%Y%m%d', t))
+        self.data.setVar("TIME", time.strftime('%H%M%S', t))
+
+    def matchFiles(self, bf):
+        """
+        Find the .bb files which match the expression in 'buildfile'.
+        """
+        if bf.startswith("/") or bf.startswith("../"):
+            bf = os.path.abspath(bf)
+
+        self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities)
+        filelist, masked = self.collection.collect_bbfiles(self.data, self.expanded_data)
+        try:
+            os.stat(bf)
+            bf = os.path.abspath(bf)
+            return [bf]
+        except OSError:
+            regexp = re.compile(bf)
+            matches = []
+            for f in filelist:
+                if regexp.search(f) and os.path.isfile(f):
+                    matches.append(f)
+            return matches
+
+    def matchFile(self, buildfile):
+        """
+        Find the .bb file which matches the expression in 'buildfile'.
+        Raise an error if multiple files match
+        """
+        matches = self.matchFiles(buildfile)
+        if len(matches) != 1:
+            if matches:
+                msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
+                if matches:
+                    for f in matches:
+                        msg += "\n    %s" % f
+                parselog.error(msg)
+            else:
+                parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
+            raise NoSpecificMatch
+        return matches[0]
+
+    def buildFile(self, buildfile, task):
+        """
+        Build the file matching regexp buildfile
+        """
+
+        # Too many people use -b because they think it's how you normally
+        # specify a target to be built, so show a warning
+        bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")
+
+        # Parse the configuration here. We need to do it explicitly here since
+        # buildFile() doesn't use the cache
+        self.parseConfiguration()
+
+        # If we are told to do the None task then query the default task
+        if (task == None):
+            task = self.configuration.cmd
+
+        fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile)
+        fn = self.matchFile(fn)
+
+        self.buildSetVars()
+
+        infos = bb.cache.Cache.parse(fn, self.collection.get_file_appends(fn), \
+                                     self.data,
+                                     self.caches_array)
+        infos = dict(infos)
+
+        fn = bb.cache.Cache.realfn2virtual(fn, cls)
+        try:
+            info_array = infos[fn]
+        except KeyError:
+            bb.fatal("%s does not exist" % fn)
+
+        if info_array[0].skipped:
+            bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))
+
+        self.recipecache.add_from_recipeinfo(fn, info_array)
+
+        # Tweak some variables
+        item = info_array[0].pn
+        self.recipecache.ignored_dependencies = set()
+        self.recipecache.bbfile_priority[fn] = 1
+
+        # Remove external dependencies
+        self.recipecache.task_deps[fn]['depends'] = {}
+        self.recipecache.deps[fn] = []
+        self.recipecache.rundeps[fn] = []
+        self.recipecache.runrecs[fn] = []
+
+        # Invalidate task for target if force mode active
+        if self.configuration.force:
+            logger.verbose("Invalidate task %s, %s", task, fn)
+            if not task.startswith("do_"):
+                task = "do_%s" % task
+            bb.parse.siggen.invalidate_task(task, self.recipecache, fn)
+
+        # Setup taskdata structure
+        taskdata = bb.taskdata.TaskData(self.configuration.abort)
+        taskdata.add_provider(self.data, self.recipecache, item)
+
+        buildname = self.data.getVar("BUILDNAME", True)
+        bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.expanded_data)
+
+        # Execute the runqueue
+        if not task.startswith("do_"):
+            task = "do_%s" % task
+        runlist = [[item, task]]
+
+        rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)
+
+        def buildFileIdle(server, rq, abort):
+
+            msg = None
+            interrupted = 0
+            if abort or self.state == state.forceshutdown:
+                rq.finish_runqueue(True)
+                msg = "Forced shutdown"
+                interrupted = 2
+            elif self.state == state.shutdown:
+                rq.finish_runqueue(False)
+                msg = "Stopped build"
+                interrupted = 1
+            failures = 0
+            try:
+                retval = rq.execute_runqueue()
+            except runqueue.TaskFailure as exc:
+                failures += len(exc.args)
+                retval = False
+            except SystemExit as exc:
+                self.command.finishAsyncCommand()
+                return False
+
+            if not retval:
+                bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, item, failures, interrupted), self.expanded_data)
+                self.command.finishAsyncCommand(msg)
+                return False
+            if retval is True:
+                return True
+            return retval
+
+        self.configuration.server_register_idlecallback(buildFileIdle, rq)
+
+    def buildTargets(self, targets, task):
+        """
+        Attempt to build the targets specified
+        """
+
+        def buildTargetsIdle(server, rq, abort):
+            msg = None
+            interrupted = 0
+            if abort or self.state == state.forceshutdown:
+                rq.finish_runqueue(True)
+                msg = "Forced shutdown"
+                interrupted = 2
+            elif self.state == state.shutdown:
+                rq.finish_runqueue(False)
+                msg = "Stopped build"
+                interrupted = 1
+            failures = 0
+            try:
+                retval = rq.execute_runqueue()
+            except runqueue.TaskFailure as exc:
+                failures += len(exc.args)
+                retval = False
+            except SystemExit as exc:
+                self.command.finishAsyncCommand()
+                return False
+
+            if not retval:
+                bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, targets, failures, interrupted), self.data)
+                self.command.finishAsyncCommand(msg)
+                return False
+            if retval is True:
+                return True
+            return retval
+
+        build.reset_cache()
+        self.buildSetVars()
+
+        # If we are told to do the None task then query the default task
+        if (task == None):
+            task = self.configuration.cmd
+
+        if not task.startswith("do_"):
+            task = "do_%s" % task
+
+        taskdata, runlist, fulltargetlist = self.buildTaskData(targets, task, self.configuration.abort)
+
+        buildname = self.data.getVar("BUILDNAME", False)
+
+        # Normalize targets so they always look like <target>:do_<task>
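+        # Illustrative examples (recipe names are arbitrary; the task here is
+        # the already-normalized default, e.g. do_build):
+        #   "core-image-minimal"  ->  "core-image-minimal:do_build"
+        #   "quilt:patch"         ->  "quilt:do_patch"
+        #   "quilt:do_compile"    ->  unchanged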
+        ntargets = []
+        for target in fulltargetlist:
+            if ":" in target:
+                if ":do_" not in target:
+                    target = "%s:do_%s" % tuple(target.split(":", 1))
+            else:
+                target = "%s:%s" % (target, task)
+            ntargets.append(target)
+
+        bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.data)
+
+        rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)
+        if 'universe' in targets:
+            rq.rqdata.warn_multi_bb = True
+
+        self.configuration.server_register_idlecallback(buildTargetsIdle, rq)
+
+
+    def getAllKeysWithFlags(self, flaglist):
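+        # Dump every non-internal variable: its value (left unexpanded for
+        # python functions), its variable history, and each flag named in
+        # flaglist (None when that flag is unset on the variable).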
+        dump = {}
+        for k in self.data.keys():
+            try:
+                expand = True
+                flags = self.data.getVarFlags(k)
+                if flags and "func" in flags and "python" in flags:
+                    expand = False
+                v = self.data.getVar(k, expand)
+                if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
+                    dump[k] = {
+                        'v': v,
+                        'history': self.data.varhistory.variable(k),
+                    }
+                    for d in flaglist:
+                        if flags and d in flags:
+                            dump[k][d] = flags[d]
+                        else:
+                            dump[k][d] = None
+            except Exception as e:
+                print(e)
+        return dump
+
+
+    def generateNewImage(self, image, base_image, package_queue, timestamp, description):
+        '''
+        Create a new image with a "require"/"inherit" base_image statement
+        '''
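+        # The generated recipe ends up looking roughly like this (sketch):
+        #   require <base_image>          (or "inherit core-image" when no base is given)
+        #   IMAGE_INSTALL = "pkg1 pkg2 "
+        #   DESCRIPTION = "<description>"
+        #   IMAGE_BASENAME = "${PN}"      (only when the base image sets IMAGE_BASENAME)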
+        if timestamp:
+            image_name = os.path.splitext(image)[0]
+            timestr = time.strftime("-%Y%m%d-%H%M%S")
+            dest = image_name + str(timestr) + ".bb"
+        else:
+            if not image.endswith(".bb"):
+                dest = image + ".bb"
+            else:
+                dest = image
+
+        basename = False
+        if base_image:
+            with open(base_image, 'r') as f:
+                require_line = f.readline()
+                p = re.compile("IMAGE_BASENAME *=")
+                for line in f:
+                    if p.search(line):
+                        basename = True
+
+        with open(dest, "w") as imagefile:
+            if base_image is None:
+                imagefile.write("inherit core-image\n")
+            else:
+                topdir = self.data.getVar("TOPDIR", False)
+                if topdir in base_image:
+                    base_image = require_line.split()[1]
+                imagefile.write("require " + base_image + "\n")
+            image_install = "IMAGE_INSTALL = \""
+            for package in package_queue:
+                image_install += str(package) + " "
+            image_install += "\"\n"
+            imagefile.write(image_install)
+
+            description_var = "DESCRIPTION = \"" + description + "\"\n"
+            imagefile.write(description_var)
+
+            if basename:
+                # If this is overridden in an inherited image, reset it to the default
+                image_basename = "IMAGE_BASENAME = \"${PN}\"\n"
+                imagefile.write(image_basename)
+
+        self.state = state.initial
+        if timestamp:
+            return timestr
+
+    def updateCacheSync(self):
+        if self.state == state.running:
+            return
+
+        # reload files for which we got notifications
+        for p in self.inotify_modified_files:
+            bb.parse.update_cache(p)
+            if p in bb.parse.BBHandler.cached_statements:
+                del bb.parse.BBHandler.cached_statements[p]
+        self.inotify_modified_files = []
+
+        if not self.baseconfig_valid:
+            logger.debug(1, "Reloading base configuration data")
+            self.initConfigurationData()
+            self.baseconfig_valid = True
+            self.parsecache_valid = False
+
+    # This is called for all async commands when self.state != running
+    def updateCache(self):
+        if self.state == state.running:
+            return
+
+        if self.state in (state.shutdown, state.forceshutdown, state.error):
+            if hasattr(self.parser, 'shutdown'):
+                self.parser.shutdown(clean=False, force = True)
+            raise bb.BBHandledException()
+
+        if self.state != state.parsing:
+            self.updateCacheSync()
+
+        if self.state != state.parsing and not self.parsecache_valid:
+            self.parseConfiguration ()
+            if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
+                bb.event.fire(bb.event.SanityCheck(False), self.data)
+
+            ignore = self.expanded_data.getVar("ASSUME_PROVIDED", True) or ""
+            self.recipecache.ignored_dependencies = set(ignore.split())
+
+            for dep in self.configuration.extra_assume_provided:
+                self.recipecache.ignored_dependencies.add(dep)
+
+            self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities)
+            (filelist, masked) = self.collection.collect_bbfiles(self.data, self.expanded_data)
+
+            self.parser = CookerParser(self, filelist, masked)
+            self.parsecache_valid = True
+
+        self.state = state.parsing
+
+        if not self.parser.parse_next():
+            collectlog.debug(1, "parsing complete")
+            if self.parser.error:
+                raise bb.BBHandledException()
+            self.show_appends_with_no_recipes()
+            self.handlePrefProviders()
+            self.recipecache.bbfile_priority = self.collection.collection_priorities(self.recipecache.pkg_fn, self.data)
+            self.state = state.running
+
+            # Send an event listing all stamps reachable after parsing
+            # which the metadata may use to clean up stale data
+            event = bb.event.ReachableStamps(self.recipecache.stamp)
+            bb.event.fire(event, self.expanded_data)
+            return None
+
+        return True
+
+    def checkPackages(self, pkgs_to_build):
+
+        # Return a copy, don't modify the original
+        pkgs_to_build = pkgs_to_build[:]
+
+        if len(pkgs_to_build) == 0:
+            raise NothingToBuild
+
+        ignore = (self.expanded_data.getVar("ASSUME_PROVIDED", True) or "").split()
+        for pkg in pkgs_to_build:
+            if pkg in ignore:
+                parselog.warn("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
+
+        if 'world' in pkgs_to_build:
+            bb.providers.buildWorldTargetList(self.recipecache)
+            pkgs_to_build.remove('world')
+            for t in self.recipecache.world_target:
+                pkgs_to_build.append(t)
+
+        if 'universe' in pkgs_to_build:
+            parselog.warn("The \"universe\" target is only intended for testing and may produce errors.")
+            parselog.debug(1, "collating packages for \"universe\"")
+            pkgs_to_build.remove('universe')
+            for t in self.recipecache.universe_target:
+                pkgs_to_build.append(t)
+
+        return pkgs_to_build
+
+
+
+
+    def pre_serve(self):
+        # Empty the environment. The environment will be populated as
+        # necessary from the data store.
+        #bb.utils.empty_environment()
+        try:
+            self.prhost = prserv.serv.auto_start(self.data)
+        except prserv.serv.PRServiceConfigError:
+            bb.event.fire(CookerExit(), self.expanded_data)
+            self.state = state.error
+        return
+
+    def post_serve(self):
+        prserv.serv.auto_shutdown(self.data)
+        bb.event.fire(CookerExit(), self.expanded_data)
+        lockfile = self.lock.name
+        self.lock.close()
+        self.lock = None
+
+        while not self.lock:
+            with bb.utils.timeout(3):
+                self.lock = bb.utils.lockfile(lockfile, shared=False, retry=False, block=True)
+                if not self.lock:
+                    # Some systems may not have lsof available
+                    procs = None
+                    try:
+                        procs = subprocess.check_output(["lsof", '-w', lockfile], stderr=subprocess.STDOUT)
+                    except OSError as e:
+                        if e.errno != errno.ENOENT:
+                            raise
+                    if procs is None:
+                        # Fall back to fuser if lsof is unavailable
+                        try:
+                            procs = subprocess.check_output(["fuser", '-v', lockfile], stderr=subprocess.STDOUT)
+                        except OSError as e:
+                            if e.errno != errno.ENOENT:
+                                raise
+
+                    msg = "Delaying shutdown due to active processes which appear to be holding bitbake.lock"
+                    if procs:
+                        msg += ":\n%s" % str(procs)
+                    print(msg)
+
+
+    def shutdown(self, force = False):
+        if force:
+            self.state = state.forceshutdown
+        else:
+            self.state = state.shutdown
+
+        if self.parser:
+            self.parser.shutdown(clean=not force, force=force)
+
+    def finishcommand(self):
+        self.state = state.initial
+
+    def reset(self):
+        self.initConfigurationData()
+
+    def lockBitbake(self):
+        if not hasattr(self, 'lock'):
+            self.lock = None
+            if self.data:
+                lockfile = self.data.expand("${TOPDIR}/bitbake.lock")
+                if lockfile:
+                    self.lock = bb.utils.lockfile(lockfile, False, False)
+        return self.lock
+
+    def unlockBitbake(self):
+        if hasattr(self, 'lock') and self.lock:
+            bb.utils.unlockfile(self.lock)
+
+def server_main(cooker, func, *args):
+    cooker.pre_serve()
+
+    if cooker.configuration.profile:
+        try:
+            import cProfile as profile
+        except:
+            import profile
+        prof = profile.Profile()
+
+        ret = profile.Profile.runcall(prof, func, *args)
+
+        prof.dump_stats("profile.log")
+        bb.utils.process_profilelog("profile.log")
+        print("Raw profiling information saved to profile.log and processed statistics to profile.log.processed")
+
+    else:
+        ret = func(*args)
+
+    cooker.post_serve()
+
+    return ret
+
+class CookerExit(bb.event.Event):
+    """
+    Notify clients of the Cooker shutdown
+    """
+
+    def __init__(self):
+        bb.event.Event.__init__(self)
+
+
+class CookerCollectFiles(object):
+    def __init__(self, priorities):
+        self.bbappends = []
+        self.bbfile_config_priorities = priorities
+
+    def calc_bbfile_priority( self, filename, matched = None ):
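+        # Return the priority of the first collection whose BBFILE_PATTERN
+        # regex matches filename, or 0 if none match. When a 'matched' set is
+        # supplied, the matching regex is recorded so that unused patterns can
+        # be reported later by collection_priorities().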
+        for _, _, regex, pri in self.bbfile_config_priorities:
+            if regex.match(filename):
+                if matched != None:
+                    if not regex in matched:
+                        matched.add(regex)
+                return pri
+        return 0
+
+    def get_bbfiles(self):
+        """Get list of default .bb files by reading out the current directory"""
+        path = os.getcwd()
+        contents = os.listdir(path)
+        bbfiles = []
+        for f in contents:
+            if f.endswith(".bb"):
+                bbfiles.append(os.path.abspath(os.path.join(path, f)))
+        return bbfiles
+
+    def find_bbfiles(self, path):
+        """Find all the .bb and .bbappend files in a directory"""
+        found = []
+        for dir, dirs, files in os.walk(path):
+            for ignored in ('SCCS', 'CVS', '.svn'):
+                if ignored in dirs:
+                    dirs.remove(ignored)
+            found += [os.path.join(dir, f) for f in files if (f.endswith(('.bb', '.bbappend')))]
+
+        return found
+
+    def collect_bbfiles(self, config, eventdata):
+        """Collect all available .bb build files"""
+        masked = 0
+
+        collectlog.debug(1, "collecting .bb files")
+
+        files = (config.getVar( "BBFILES", True) or "").split()
+        config.setVar("BBFILES", " ".join(files))
+
+        # Sort files by priority
+        files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem) )
+
+        if not len(files):
+            files = self.get_bbfiles()
+
+        if not len(files):
+            collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
+            bb.event.fire(CookerExit(), eventdata)
+
+        # Can't use set here as order is important
+        newfiles = []
+        for f in files:
+            if os.path.isdir(f):
+                dirfiles = self.find_bbfiles(f)
+                for g in dirfiles:
+                    if g not in newfiles:
+                        newfiles.append(g)
+            else:
+                globbed = glob.glob(f)
+                if not globbed and os.path.exists(f):
+                    globbed = [f]
+                for g in globbed:
+                    if g not in newfiles:
+                        newfiles.append(g)
+
+        bbmask = config.getVar('BBMASK', True)
+
+        if bbmask:
+            # First validate the individual regular expressions and ignore any
+            # that do not compile
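+            # (Sketch: with BBMASK = "meta-foo/ (broken", "meta-foo/" is kept
+            # while "(broken" fails to compile and is reported below.)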
+            bbmasks = []
+            for mask in bbmask.split():
+                try:
+                    re.compile(mask)
+                    bbmasks.append(mask)
+                except sre_constants.error:
+                    collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)
+
+            # Then validate the combined regular expressions. This should never
+            # fail, but better safe than sorry...
+            bbmask = "|".join(bbmasks)
+            try:
+                bbmask_compiled = re.compile(bbmask)
+            except sre_constants.error:
+                collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
+                bbmask = None
+
+        bbfiles = []
+        bbappend = []
+        for f in newfiles:
+            if bbmask and bbmask_compiled.search(f):
+                collectlog.debug(1, "skipping masked file %s", f)
+                masked += 1
+                continue
+            if f.endswith('.bb'):
+                bbfiles.append(f)
+            elif f.endswith('.bbappend'):
+                bbappend.append(f)
+            else:
+                collectlog.debug(1, "skipping %s: unknown file extension", f)
+
+        # Build a list of .bbappend files for each .bb file
+        for f in bbappend:
+            base = os.path.basename(f).replace('.bbappend', '.bb')
+            self.bbappends.append((base, f))
+
+        # Find overlayed recipes
+        # bbfiles will be in priority order which makes this easy
+        bbfile_seen = dict()
+        self.overlayed = defaultdict(list)
+        for f in reversed(bbfiles):
+            base = os.path.basename(f)
+            if base not in bbfile_seen:
+                bbfile_seen[base] = f
+            else:
+                topfile = bbfile_seen[base]
+                self.overlayed[topfile].append(f)
+
+        return (bbfiles, masked)
+
+    def get_file_appends(self, fn):
+        """
+        Returns a list of .bbappend files to apply to fn
+        """
+        filelist = []
+        f = os.path.basename(fn)
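+        # Exact basename matches apply, and so do '%' wildcard appends: e.g. a
+        # hypothetical busybox_1.%.bbappend (recorded in self.bbappends with its
+        # basename rewritten to busybox_1.%.bb) applies to busybox_1.23.1.bb
+        # because the prefix before the '%' matches.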
+        for b in self.bbappends:
+            (bbappend, filename) = b
+            if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
+                filelist.append(filename)
+        return filelist
+
+    def collection_priorities(self, pkgfns, d):
+
+        priorities = {}
+
+        # Calculate priorities for each file
+        matched = set()
+        for p in pkgfns:
+            realfn, cls = bb.cache.Cache.virtualfn2realfn(p)
+            priorities[p] = self.calc_bbfile_priority(realfn, matched)
+
+        # Don't show the warning if the BBFILE_PATTERN did match .bbappend files
+        unmatched = set()
+        for _, _, regex, pri in self.bbfile_config_priorities:
+            if not regex in matched:
+                unmatched.add(regex)
+
+        def findmatch(regex):
+            for b in self.bbappends:
+                (bbfile, append) = b
+                if regex.match(append):
+                    return True
+            return False
+
+        for unmatch in unmatched.copy():
+            if findmatch(unmatch):
+                unmatched.remove(unmatch)
+
+        for collection, pattern, regex, _ in self.bbfile_config_priorities:
+            if regex in unmatched:
+                if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection, True) != '1':
+                    collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
+
+        return priorities
+
+class ParsingFailure(Exception):
+    def __init__(self, realexception, recipe):
+        self.realexception = realexception
+        self.recipe = recipe
+        Exception.__init__(self, realexception, recipe)
+
+class Feeder(multiprocessing.Process):
+    def __init__(self, jobs, to_parsers, quit):
+        self.quit = quit
+        self.jobs = jobs
+        self.to_parsers = to_parsers
+        multiprocessing.Process.__init__(self)
+
+    def run(self):
+        while True:
+            try:
+                quit = self.quit.get_nowait()
+            except Queue.Empty:
+                pass
+            else:
+                if quit == 'cancel':
+                    self.to_parsers.cancel_join_thread()
+                break
+
+            try:
+                job = self.jobs.pop()
+            except IndexError:
+                break
+
+            try:
+                self.to_parsers.put(job, timeout=0.5)
+            except Queue.Full:
+                self.jobs.insert(0, job)
+                continue
+
+class Parser(multiprocessing.Process):
+    def __init__(self, jobs, results, quit, init, profile):
+        self.jobs = jobs
+        self.results = results
+        self.quit = quit
+        self.init = init
+        multiprocessing.Process.__init__(self)
+        self.context = bb.utils.get_context().copy()
+        self.handlers = bb.event.get_class_handlers().copy()
+        self.profile = profile
+
+    def run(self):
+
+        if not self.profile:
+            self.realrun()
+            return
+
+        try:
+            import cProfile as profile
+        except:
+            import profile
+        prof = profile.Profile()
+        try:
+            profile.Profile.runcall(prof, self.realrun)
+        finally:
+            logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
+            prof.dump_stats(logfile)
+
+    def realrun(self):
+        if self.init:
+            self.init()
+
+        pending = []
+        while True:
+            try:
+                self.quit.get_nowait()
+            except Queue.Empty:
+                pass
+            else:
+                self.results.cancel_join_thread()
+                break
+
+            if pending:
+                result = pending.pop()
+            else:
+                try:
+                    job = self.jobs.get(timeout=0.25)
+                except Queue.Empty:
+                    continue
+
+                if job is None:
+                    break
+                result = self.parse(*job)
+
+            try:
+                self.results.put(result, timeout=0.25)
+            except Queue.Full:
+                pending.append(result)
+
+    def parse(self, filename, appends, caches_array):
+        try:
+            # Reset our environment and handlers to the original settings
+            bb.utils.set_context(self.context.copy())
+            bb.event.set_class_handlers(self.handlers.copy())
+            return True, bb.cache.Cache.parse(filename, appends, self.cfg, caches_array)
+        except Exception as exc:
+            tb = sys.exc_info()[2]
+            exc.recipe = filename
+            exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
+            return True, exc
+        # Turn BaseExceptions into Exceptions here so we shut down gracefully
+        # rather than having a worker process silently exit on its own in
+        # response to, for example, a SystemExit event.
+        except BaseException as exc:
+            return True, ParsingFailure(exc, filename)
+
+class CookerParser(object):
+    def __init__(self, cooker, filelist, masked):
+        self.filelist = filelist
+        self.cooker = cooker
+        self.cfgdata = cooker.data
+        self.cfghash = cooker.data_hash
+
+        # Accounting statistics
+        self.parsed = 0
+        self.cached = 0
+        self.error = 0
+        self.masked = masked
+
+        self.skipped = 0
+        self.virtuals = 0
+        self.total = len(filelist)
+
+        self.current = 0
+        self.process_names = []
+
+        self.bb_cache = bb.cache.Cache(self.cfgdata, self.cfghash, cooker.caches_array)
+        self.fromcache = []
+        self.willparse = []
+        for filename in self.filelist:
+            appends = self.cooker.collection.get_file_appends(filename)
+            if not self.bb_cache.cacheValid(filename, appends):
+                self.willparse.append((filename, appends, cooker.caches_array))
+            else:
+                self.fromcache.append((filename, appends))
+        self.toparse = self.total - len(self.fromcache)
+        self.progress_chunk = max(self.toparse / 100, 1)
+
+        self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or
+                                 multiprocessing.cpu_count()), len(self.willparse))
+
+        self.start()
+        self.haveshutdown = False
+
+    def start(self):
+        self.results = self.load_cached()
+        self.processes = []
+        if self.toparse:
+            bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
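+            # Parsing pipeline: a Feeder process streams (filename, appends,
+            # caches_array) jobs into self.jobs, the pool of Parser processes
+            # consumes them, and their results are drained from
+            # self.result_queue by parse_generator() below.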
+            def init():
+                Parser.cfg = self.cfgdata
+                bb.utils.set_process_name(multiprocessing.current_process().name)
+                multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
+                multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
+
+            self.feeder_quit = multiprocessing.Queue(maxsize=1)
+            self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
+            self.jobs = multiprocessing.Queue(maxsize=self.num_processes)
+            self.result_queue = multiprocessing.Queue()
+            self.feeder = Feeder(self.willparse, self.jobs, self.feeder_quit)
+            self.feeder.start()
+            for i in range(0, self.num_processes):
+                parser = Parser(self.jobs, self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
+                parser.start()
+                self.process_names.append(parser.name)
+                self.processes.append(parser)
+
+            self.results = itertools.chain(self.results, self.parse_generator())
+
+    def shutdown(self, clean=True, force=False):
+        if not self.toparse:
+            return
+        if self.haveshutdown:
+            return
+        self.haveshutdown = True
+
+        if clean:
+            event = bb.event.ParseCompleted(self.cached, self.parsed,
+                                            self.skipped, self.masked,
+                                            self.virtuals, self.error,
+                                            self.total)
+
+            bb.event.fire(event, self.cfgdata)
+            self.feeder_quit.put(None)
+            for process in self.processes:
+                self.jobs.put(None)
+                self.parser_quit.put(None)
+        else:
+            self.feeder_quit.put('cancel')
+
+            self.parser_quit.cancel_join_thread()
+            for process in self.processes:
+                self.parser_quit.put(None)
+
+            self.jobs.cancel_join_thread()
+
+        for process in self.processes:
+            if force:
+                process.join(.1)
+                process.terminate()
+            else:
+                process.join()
+        self.feeder.join()
+
+        sync = threading.Thread(target=self.bb_cache.sync)
+        sync.start()
+        multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
+        bb.codeparser.parser_cache_savemerge()
+        bb.fetch.fetcher_parse_done()
+        if self.cooker.configuration.profile:
+            profiles = []
+            for i in self.process_names:
+                logfile = "profile-parse-%s.log" % i
+                if os.path.exists(logfile):
+                    profiles.append(logfile)
+
+            pout = "profile-parse.log.processed"
+            bb.utils.process_profilelog(profiles, pout = pout)
+            print("Processed parsing statistics saved to %s" % (pout))
+
+    def load_cached(self):
+        for filename, appends in self.fromcache:
+            cached, infos = self.bb_cache.load(filename, appends, self.cfgdata)
+            yield not cached, infos
+
+    def parse_generator(self):
+        while True:
+            if self.parsed >= self.toparse:
+                break
+
+            try:
+                result = self.result_queue.get(timeout=0.25)
+            except Queue.Empty:
+                pass
+            else:
+                value = result[1]
+                if isinstance(value, BaseException):
+                    raise value
+                else:
+                    yield result
+
+    def parse_next(self):
+        result = []
+        parsed = None
+        try:
+            parsed, result = self.results.next()
+        except StopIteration:
+            self.shutdown()
+            return False
+        except bb.BBHandledException as exc:
+            self.error += 1
+            logger.error('Failed to parse recipe: %s' % exc.recipe)
+            self.shutdown(clean=False)
+            return False
+        except ParsingFailure as exc:
+            self.error += 1
+            logger.error('Unable to parse %s: %s' %
+                     (exc.recipe, bb.exceptions.to_string(exc.realexception)))
+            self.shutdown(clean=False)
+            return False
+        except bb.parse.ParseError as exc:
+            self.error += 1
+            logger.error(str(exc))
+            self.shutdown(clean=False)
+            return False
+        except bb.data_smart.ExpansionError as exc:
+            self.error += 1
+            _, value, _ = sys.exc_info()
+            logger.error('ExpansionError during parsing %s: %s', value.recipe, str(exc))
+            self.shutdown(clean=False)
+            return False
+        except Exception as exc:
+            self.error += 1
+            etype, value, tb = sys.exc_info()
+            if hasattr(value, "recipe"):
+                logger.error('Unable to parse %s', value.recipe,
+                            exc_info=(etype, value, exc.traceback))
+            else:
+                # Most likely, an exception occurred during raising an exception
+                import traceback
+                logger.error('Exception during parse: %s' % traceback.format_exc())
+            self.shutdown(clean=False)
+            return False
+
+        self.current += 1
+        self.virtuals += len(result)
+        if parsed:
+            self.parsed += 1
+            if self.parsed % self.progress_chunk == 0:
+                bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
+                              self.cfgdata)
+        else:
+            self.cached += 1
+
+        for virtualfn, info_array in result:
+            if info_array[0].skipped:
+                self.skipped += 1
+                self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
+            self.bb_cache.add_info(virtualfn, info_array, self.cooker.recipecache,
+                                        parsed=parsed, watcher = self.cooker.add_filewatch)
+        return True
+
+    def reparse(self, filename):
+        infos = self.bb_cache.parse(filename,
+                                    self.cooker.collection.get_file_appends(filename),
+                                    self.cfgdata, self.cooker.caches_array)
+        for vfn, info_array in infos:
+            self.cooker.recipecache.add_from_recipeinfo(vfn, info_array)

+ 341 - 0
bitbake/lib/bb/cookerdata.py

@@ -0,0 +1,341 @@
+#!/usr/bin/env python
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Copyright (C) 2003, 2004  Chris Larson
+# Copyright (C) 2003, 2004  Phil Blundell
+# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
+# Copyright (C) 2005        Holger Hans Peter Freyther
+# Copyright (C) 2005        ROAD GmbH
+# Copyright (C) 2006        Richard Purdie
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os, sys
+from functools import wraps
+import logging
+import bb
+from bb import data
+import bb.parse
+
+logger      = logging.getLogger("BitBake")
+parselog    = logging.getLogger("BitBake.Parsing")
+
+class ConfigParameters(object):
+    def __init__(self, argv=sys.argv):
+        self.options, targets = self.parseCommandLine(argv)
+        self.environment = self.parseEnvironment()
+
+        self.options.pkgs_to_build = targets or []
+
+        self.options.tracking = False
+        if hasattr(self.options, "show_environment") and self.options.show_environment:
+            self.options.tracking = True
+
+        for key, val in self.options.__dict__.items():
+            setattr(self, key, val)
+
+    def parseCommandLine(self, argv=sys.argv):
+        raise Exception("Caller must implement commandline option parsing")
+
+    def parseEnvironment(self):
+        return os.environ.copy()
+
+    def updateFromServer(self, server):
+        if not self.options.cmd:
+            defaulttask, error = server.runCommand(["getVariable", "BB_DEFAULT_TASK"])
+            if error:
+                raise Exception("Unable to get the value of BB_DEFAULT_TASK from the server: %s" % error)
+            self.options.cmd = defaulttask or "build"
+        _, error = server.runCommand(["setConfig", "cmd", self.options.cmd])
+        if error:
+            raise Exception("Unable to set configuration option 'cmd' on the server: %s" % error)
+
+        if not self.options.pkgs_to_build:
+            bbpkgs, error = server.runCommand(["getVariable", "BBTARGETS"])
+            if error:
+                raise Exception("Unable to get the value of BBTARGETS from the server: %s" % error)
+            if bbpkgs:
+                self.options.pkgs_to_build.extend(bbpkgs.split())
+
+    def updateToServer(self, server, environment):
+        options = {}
+        for o in ["abort", "tryaltconfigs", "force", "invalidate_stamp", 
+                  "verbose", "debug", "dry_run", "dump_signatures", 
+                  "debug_domains", "extra_assume_provided", "profile",
+                  "prefile", "postfile"]:
+            options[o] = getattr(self.options, o)
+
+        ret, error = server.runCommand(["updateConfig", options, environment])
+        if error:
+            raise Exception("Unable to update the server configuration with local parameters: %s" % error)
+
+    def parseActions(self):
+        # Parse any commandline into actions
+        action = {'action':None, 'msg':None}
+        if self.options.show_environment:
+            if 'world' in self.options.pkgs_to_build:
+                action['msg'] = "'world' is not a valid target for --environment."
+            elif 'universe' in self.options.pkgs_to_build:
+                action['msg'] = "'universe' is not a valid target for --environment."
+            elif len(self.options.pkgs_to_build) > 1:
+                action['msg'] = "Only one target can be used with the --environment option."
+            elif self.options.buildfile and len(self.options.pkgs_to_build) > 0:
+                action['msg'] = "No target should be used with the --environment and --buildfile options."
+            elif len(self.options.pkgs_to_build) > 0:
+                action['action'] = ["showEnvironmentTarget", self.options.pkgs_to_build]
+            else:
+                action['action'] = ["showEnvironment", self.options.buildfile]
+        elif self.options.buildfile is not None:
+            action['action'] = ["buildFile", self.options.buildfile, self.options.cmd]
+        elif self.options.revisions_changed:
+            action['action'] = ["compareRevisions"]
+        elif self.options.show_versions:
+            action['action'] = ["showVersions"]
+        elif self.options.parse_only:
+            action['action'] = ["parseFiles"]
+        elif self.options.dot_graph:
+            if self.options.pkgs_to_build:
+                action['action'] = ["generateDotGraph", self.options.pkgs_to_build, self.options.cmd]
+            else:
+                action['msg'] = "Please specify a package name for dependency graph generation."
+        else:
+            if self.options.pkgs_to_build:
+                action['action'] = ["buildTargets", self.options.pkgs_to_build, self.options.cmd]
+            else:
+                #action['msg'] = "Nothing to do.  Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information."
+                action = None
+        self.options.initialaction = action
+        return action
+
+class CookerConfiguration(object):
+    """
+    Manages build options and configurations for one run
+    """
+
+    def __init__(self):
+        self.debug_domains = []
+        self.extra_assume_provided = []
+        self.prefile = []
+        self.postfile = []
+        self.prefile_server = []
+        self.postfile_server = []
+        self.debug = 0
+        self.cmd = None
+        self.abort = True
+        self.force = False
+        self.profile = False
+        self.nosetscene = False
+        self.setsceneonly = False
+        self.invalidate_stamp = False
+        self.dump_signatures = []
+        self.dry_run = False
+        self.tracking = False
+        self.interface = []
+        self.writeeventlog = False
+
+        self.env = {}
+
+    def setConfigParameters(self, parameters):
+        for key in self.__dict__.keys():
+            if key in parameters.options.__dict__:
+                setattr(self, key, parameters.options.__dict__[key])
+        self.env = parameters.environment.copy()
+        self.tracking = parameters.tracking
+
+    def setServerRegIdleCallback(self, srcb):
+        self.server_register_idlecallback = srcb
+
+    def __getstate__(self):
+        state = {}
+        for key in self.__dict__.keys():
+            if key == "server_register_idlecallback":
+                state[key] = None
+            else:
+                state[key] = getattr(self, key)
+        return state
+
+    def __setstate__(self,state):
+        for k in state:
+            setattr(self, k, state[k]) 
+
+
+def catch_parse_error(func):
+    """Exception handling bits for our parsing"""
+    @wraps(func)
+    def wrapped(fn, *args):
+        try:
+            return func(fn, *args)
+        except IOError as exc:
+            import traceback
+            parselog.critical(traceback.format_exc())
+            parselog.critical("Unable to parse %s: %s" % (fn, exc))
+            sys.exit(1)
+        except (bb.parse.ParseError, bb.data_smart.ExpansionError) as exc:
+            import traceback
+
+            bbdir = os.path.dirname(__file__) + os.sep
+            exc_class, exc, tb = sys.exc_info()
+            for tb in iter(lambda: tb.tb_next, None):
+                # Skip frames in bitbake itself, we only want the metadata
+                fn, _, _, _ = traceback.extract_tb(tb, 1)[0]
+                if not fn.startswith(bbdir):
+                    break
+            parselog.critical("Unable to parse %s", fn, exc_info=(exc_class, exc, tb))
+            sys.exit(1)
+    return wrapped
+
+@catch_parse_error
+def parse_config_file(fn, data, include=True):
+    return bb.parse.handle(fn, data, include)
+
+@catch_parse_error
+def _inherit(bbclass, data):
+    bb.parse.BBHandler.inherit(bbclass, "configuration INHERITs", 0, data)
+    return data
+
+def findConfigFile(configfile, data):
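+    # Look for conf/<configfile> in each BBPATH entry first, then in the
+    # current directory and each of its parents, returning the first path
+    # that exists (or None).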
+    search = []
+    bbpath = data.getVar("BBPATH", True)
+    if bbpath:
+        for i in bbpath.split(":"):
+            search.append(os.path.join(i, "conf", configfile))
+    path = os.getcwd()
+    while path != "/":
+        search.append(os.path.join(path, "conf", configfile))
+        path, _ = os.path.split(path)
+
+    for i in search:
+        if os.path.exists(i):
+            return i
+
+    return None
+
+class CookerDataBuilder(object):
+
+    def __init__(self, cookercfg, worker = False):
+
+        self.prefiles = cookercfg.prefile
+        self.postfiles = cookercfg.postfile
+        self.tracking = cookercfg.tracking
+
+        bb.utils.set_context(bb.utils.clean_context())
+        bb.event.set_class_handlers(bb.event.clean_class_handlers())
+        self.data = bb.data.init()
+        if self.tracking:
+            self.data.enableTracking()
+
+        # Keep a datastore of the initial environment variables and their
+        # values from when BitBake was launched, so that child processes
+        # can use environment variables which have been cleaned from the
+        # BitBake process's environment.
+        self.savedenv = bb.data.init()
+        for k in cookercfg.env:
+            self.savedenv.setVar(k, cookercfg.env[k])
+
+        filtered_keys = bb.utils.approved_variables()
+        bb.data.inheritFromOS(self.data, self.savedenv, filtered_keys)
+        self.data.setVar("BB_ORIGENV", self.savedenv)
+        
+        if worker:
+            self.data.setVar("BB_WORKERCONTEXT", "1")
+
+    def parseBaseConfiguration(self):
+        try:
+            self.parseConfigurationFiles(self.prefiles, self.postfiles)
+        except SyntaxError:
+            raise bb.BBHandledException
+        except bb.data_smart.ExpansionError as e:
+            logger.error(str(e))
+            raise bb.BBHandledException
+        except Exception:
+            logger.exception("Error parsing configuration files")
+            raise bb.BBHandledException
+
+    def _findLayerConf(self, data):
+        return findConfigFile("bblayers.conf", data)
+
+    def parseConfigurationFiles(self, prefiles, postfiles):
+        data = self.data
+        bb.parse.init_parser(data)
+
+        # Parse files for loading *before* bitbake.conf and any includes
+        for f in prefiles:
+            data = parse_config_file(f, data)
+
+        layerconf = self._findLayerConf(data)
+        if layerconf:
+            parselog.debug(2, "Found bblayers.conf (%s)", layerconf)
+            # By definition bblayers.conf is in conf/ of TOPDIR.
+            # We may have been called with cwd somewhere else so reset TOPDIR
+            data.setVar("TOPDIR", os.path.dirname(os.path.dirname(layerconf)))
+            data = parse_config_file(layerconf, data)
+
+            layers = (data.getVar('BBLAYERS', True) or "").split()
+
+            data = bb.data.createCopy(data)
+            approved = bb.utils.approved_variables()
+            for layer in layers:
+                parselog.debug(2, "Adding layer %s", layer)
+                if 'HOME' in approved and '~' in layer:
+                    layer = os.path.expanduser(layer)
+                data.setVar('LAYERDIR', layer)
+                data = parse_config_file(os.path.join(layer, "conf", "layer.conf"), data)
+                data.expandVarref('LAYERDIR')
+
+            data.delVar('LAYERDIR')
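+            # Each layer's conf/layer.conf conventionally extends BBPATH and
+            # BBFILES via ${LAYERDIR}, which expandVarref() above resolves to
+            # that layer's absolute path before LAYERDIR is deleted.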
+
+        if not data.getVar("BBPATH", True):
+            msg = "The BBPATH variable is not set"
+            if not layerconf:
+                msg += (" and bitbake did not find a conf/bblayers.conf file in"
+                        " the expected location.\nMaybe you accidentally"
+                        " invoked bitbake from the wrong directory?")
+            raise SystemExit(msg)
+
+        data = parse_config_file(os.path.join("conf", "bitbake.conf"), data)
+
+        # Parse files for loading *after* bitbake.conf and any includes
+        for p in postfiles:
+            data = parse_config_file(p, data)
+
+        # Handle any INHERITs and inherit the base class
+        bbclasses  = ["base"] + (data.getVar('INHERIT', True) or "").split()
+        for bbclass in bbclasses:
+            data = _inherit(bbclass, data)
+
+        # Normally we only register event handlers at the end of parsing .bb files
+        # We register any handlers we've found so far here...
+        for var in data.getVar('__BBHANDLERS', False) or []:
+            handlerfn = data.getVarFlag(var, "filename", False)
+            handlerln = int(data.getVarFlag(var, "lineno", False))
+            bb.event.register(var, data.getVar(var, False),  (data.getVarFlag(var, "eventmask", True) or "").split(), handlerfn, handlerln)
+
+        if data.getVar("BB_WORKERCONTEXT", False) is None:
+            bb.fetch.fetcher_init(data)
+        bb.codeparser.parser_cache_init(data)
+        bb.event.fire(bb.event.ConfigParsed(), data)
+
+        if data.getVar("BB_INVALIDCONF", False) is True:
+            data.setVar("BB_INVALIDCONF", False)
+            self.parseConfigurationFiles(self.prefiles, self.postfiles)
+            return
+
+        bb.parse.init_parser(data)
+        data.setVar('BBINCLUDED',bb.parse.get_file_depends(data))
+        self.data = data
+        self.data_hash = data.get_hash()
+
+
+

+ 193 - 0
bitbake/lib/bb/daemonize.py

@@ -0,0 +1,193 @@
+"""
+Python Daemonizing helper
+
+Configurable daemon behaviors:
+
+    1.) Set the current working directory to the "/" directory.
+    2.) Set the current file creation mode mask to 0.
+    3.) Close all open files (up to 1024).
+    4.) Redirect the standard I/O streams to "/dev/null".
+
+A failed call to fork() now raises an exception.
+
+References:
+    1) Advanced Programming in the Unix Environment: W. Richard Stevens
+	http://www.apuebook.com/apue3e.html
+    2) The Linux Programming Interface: Michael Kerrisk
+	http://man7.org/tlpi/index.html
+    3) Unix Programming Frequently Asked Questions:
+	http://www.faqs.org/faqs/unix-faq/programmer/faq/
+
+Modified to allow a function to be daemonized and return for 
+bitbake use by Richard Purdie
+"""
+
+__author__ = "Chad J. Schroeder"
+__copyright__ = "Copyright (C) 2005 Chad J. Schroeder"
+__version__ = "0.2"
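+
+# Minimal usage sketch (the log path below is arbitrary): createDaemon(func, "daemon.log")
+# returns in the calling process while 'func' runs in the detached grandchild,
+# with its stdout/stderr redirected to the given log file.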
+
+# Standard Python modules.
+import os                    # Miscellaneous OS interfaces.
+import sys                   # System-specific parameters and functions.
+
+# Default daemon parameters.
+# File mode creation mask of the daemon.
+# For BitBake's children, we do want to inherit the parent umask.
+UMASK = None
+
+# Default maximum for the number of available file descriptors.
+MAXFD = 1024
+
+# The standard I/O file descriptors are redirected to /dev/null by default.
+if (hasattr(os, "devnull")):
+    REDIRECT_TO = os.devnull
+else:
+    REDIRECT_TO = "/dev/null"
+
+def createDaemon(function, logfile):
+    """
+    Detach a process from the controlling terminal and run it in the
+    background as a daemon, returning control to the caller.
+    """
+
+    try:
+        # Fork a child process so the parent can exit.  This returns control to
+        # the command-line or shell.  It also guarantees that the child will not
+        # be a process group leader, since the child receives a new process ID
+        # and inherits the parent's process group ID.  This step is required
+        # to ensure that the next call to os.setsid is successful.
+        pid = os.fork()
+    except OSError as e:
+        raise Exception("%s [%d]" % (e.strerror, e.errno))
+
+    if (pid == 0):      # The first child.
+        # To become the session leader of this new session and the process group
+        # leader of the new process group, we call os.setsid().  The process is
+        # also guaranteed not to have a controlling terminal.
+        os.setsid()
+
+        # Is ignoring SIGHUP necessary?
+        #
+        # It's often suggested that the SIGHUP signal should be ignored before
+        # the second fork to avoid premature termination of the process.  The
+        # reason is that when the first child terminates, all processes, e.g.
+        # the second child, in the orphaned group will be sent a SIGHUP.
+        #
+        # "However, as part of the session management system, there are exactly
+        # two cases where SIGHUP is sent on the death of a process:
+        #
+        #    1) When the process that dies is the session leader of a session that
+        #        is attached to a terminal device, SIGHUP is sent to all processes
+        #        in the foreground process group of that terminal device.
+        #    2) When the death of a process causes a process group to become
+        #        orphaned, and one or more processes in the orphaned group are
+        #        stopped, then SIGHUP and SIGCONT are sent to all members of the
+        #        orphaned group." [2]
+        #
+        # The first case can be ignored since the child is guaranteed not to have
+        # a controlling terminal.  The second case isn't so easy to dismiss.
+        # The process group is orphaned when the first child terminates and
+        # POSIX.1 requires that every STOPPED process in an orphaned process
+        # group be sent a SIGHUP signal followed by a SIGCONT signal.  Since the
+        # second child is not STOPPED though, we can safely forego ignoring the
+        # SIGHUP signal.  In any case, there are no ill-effects if it is ignored.
+        #
+        # import signal              # Set handlers for asynchronous events.
+        # signal.signal(signal.SIGHUP, signal.SIG_IGN)
+
+        try:
+            # Fork a second child and exit immediately to prevent zombies.  This
+            # causes the second child process to be orphaned, making the init
+            # process responsible for its cleanup.  And, since the first child is
+            # a session leader without a controlling terminal, it's possible for
+            # it to acquire one by opening a terminal in the future (System V-
+            # based systems).  This second fork guarantees that the child is no
+            # longer a session leader, preventing the daemon from ever acquiring
+            # a controlling terminal.
+            pid = os.fork()     # Fork a second child.
+        except OSError as e:
+            raise Exception("%s [%d]" % (e.strerror, e.errno))
+
+        if (pid == 0):  # The second child.
+            # We probably don't want the file mode creation mask inherited from
+            # the parent, so we give the child complete control over permissions.
+            if UMASK is not None:
+                os.umask(UMASK)
+        else:
+            # Parent (the first child) of the second child.
+            os._exit(0)
+    else:
+        # exit() or _exit()?
+        # _exit is like exit(), but it doesn't call any functions registered
+        # with atexit (and on_exit) or any registered signal handlers.  It also
+        # closes any open file descriptors.  Using exit() may cause all stdio
+        # streams to be flushed twice and any temporary files may be unexpectedly
+        # removed.  It's therefore recommended that child branches of a fork()
+        # and the parent branch(es) of a daemon use _exit().
+        return
+
+    # Close all open file descriptors.  This prevents the child from keeping
+    # open any file descriptors inherited from the parent.  There is a variety
+    # of methods to accomplish this task.  Three are listed below.
+    #
+    # Try the system configuration variable, SC_OPEN_MAX, to obtain the maximum
+    # number of open file descriptors to close.  If it doesn't exist, use
+    # the default value (configurable).
+    #
+    # try:
+    #     maxfd = os.sysconf("SC_OPEN_MAX")
+    # except (AttributeError, ValueError):
+    #     maxfd = MAXFD
+    #
+    # OR
+    #
+    # if (os.sysconf_names.has_key("SC_OPEN_MAX")):
+    #     maxfd = os.sysconf("SC_OPEN_MAX")
+    # else:
+    #     maxfd = MAXFD
+    #
+    # OR
+    #
+    # Use the getrlimit method to retrieve the maximum file descriptor number
+    # that can be opened by this process.  If there is no limit on the
+    # resource, use the default value.
+    #
+    import resource             # Resource usage information.
+    maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
+    if (maxfd == resource.RLIM_INFINITY):
+        maxfd = MAXFD
+  
+    # Iterate through and close all file descriptors.
+#    for fd in range(0, maxfd):
+#        try:
+#            os.close(fd)
+#        except OSError:        # ERROR, fd wasn't open to begin with (ignored)
+#            pass
+
+    # Redirect the standard I/O file descriptors to the specified file.  Since
+    # the daemon has no controlling terminal, most daemons redirect stdin,
+    # stdout, and stderr to /dev/null.  This is done to prevent side-effects
+    # from reads and writes to the standard I/O file descriptors.
+
+    # This call to open is guaranteed to return the lowest file descriptor,
+    # which will be 0 (stdin), since it was closed above.
+#    os.open(REDIRECT_TO, os.O_RDWR)    # standard input (0)
+
+    # Duplicate standard input to standard output and standard error.
+#    os.dup2(0, 1)                      # standard output (1)
+#    os.dup2(0, 2)                      # standard error (2)
+
+
+    si = file('/dev/null', 'r')
+    so = file(logfile, 'w')
+    se = so
+
+
+    # Replace those fds with our own
+    os.dup2(si.fileno(), sys.stdin.fileno())
+    os.dup2(so.fileno(), sys.stdout.fileno())
+    os.dup2(se.fileno(), sys.stderr.fileno())
+
+    function()
+
+    os._exit(0)

+ 448 - 0
bitbake/lib/bb/data.py

@@ -0,0 +1,448 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Data' implementations
+
+Functions for interacting with the data structure used by the
+BitBake build tools.
+
+The expandKeys and update_data are the most expensive
+operations. At night the cookie monster came by and
+suggested 'give me cookies on setting the variables and
+things will work out'. Taking this suggestion into account
+applying the skills from the not yet passed 'Entwurf und
+Analyse von Algorithmen' lecture and the cookie
+monster seems to be right. We will track setVar more carefully
+to have faster update_data and expandKeys operations.
+
+This is a trade-off between speed and memory again but
+the speed is more critical here.
+"""
+
+# Copyright (C) 2003, 2004  Chris Larson
+# Copyright (C) 2005        Holger Hans Peter Freyther
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import sys, os, re
+if sys.argv[0][-5:] == "pydoc":
+    path = os.path.dirname(os.path.dirname(sys.argv[1]))
+else:
+    path = os.path.dirname(os.path.dirname(sys.argv[0]))
+sys.path.insert(0, path)
+from itertools import groupby
+
+from bb import data_smart
+from bb import codeparser
+import bb
+
+logger = data_smart.logger
+_dict_type = data_smart.DataSmart
+
+def init():
+    """Return a new object representing the Bitbake data"""
+    return _dict_type()
+
+def init_db(parent = None):
+    """Return a new object representing the Bitbake data,
+    optionally based on an existing object"""
+    if parent is not None:
+        return parent.createCopy()
+    else:
+        return _dict_type()
+
+def createCopy(source):
+    """Link the source set to the destination
+    If a value is not found in the destination datastore, the lookup falls
+    through to the source datastore. Values from the source are copy-on-write:
+    any attempt to modify one of them stores the modified value in the
+    destination datastore only.
+    """
+    return source.createCopy()
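+
+# A minimal sketch of the copy-on-write behaviour described above:
+#   d = init()
+#   d.setVar("FOO", "bar")
+#   d2 = createCopy(d)
+#   d2.setVar("FOO", "baz")   # d2 now yields "baz"; the original d still yields "bar"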
+
+def initVar(var, d):
+    """Non-destructive var init for data structure"""
+    d.initVar(var)
+
+
+def setVar(var, value, d):
+    """Set a variable to a given value"""
+    d.setVar(var, value)
+
+
+def getVar(var, d, exp = False):
+    """Gets the value of a variable"""
+    return d.getVar(var, exp)
+
+
+def renameVar(key, newkey, d):
+    """Renames a variable from key to newkey"""
+    d.renameVar(key, newkey)
+
+def delVar(var, d):
+    """Removes a variable from the data set"""
+    d.delVar(var)
+
+def appendVar(var, value, d):
+    """Append additional value to a variable"""
+    d.appendVar(var, value)
+
+def setVarFlag(var, flag, flagvalue, d):
+    """Set a flag for a given variable to a given value"""
+    d.setVarFlag(var, flag, flagvalue)
+
+def getVarFlag(var, flag, d):
+    """Gets given flag from given var"""
+    return d.getVarFlag(var, flag, False)
+
+def delVarFlag(var, flag, d):
+    """Removes a given flag from the variable's flags"""
+    d.delVarFlag(var, flag)
+
+def setVarFlags(var, flags, d):
+    """Set the flags for a given variable
+
+    Note:
+        setVarFlags will not clear previous
+        flags. Think of this method as
+        addVarFlags
+    """
+    d.setVarFlags(var, flags)
+
+def getVarFlags(var, d):
+    """Gets a variable's flags"""
+    return d.getVarFlags(var)
+
+def delVarFlags(var, d):
+    """Removes a variable's flags"""
+    d.delVarFlags(var)
+
+def keys(d):
+    """Return a list of keys in d"""
+    return d.keys()
+
+
+__expand_var_regexp__ = re.compile(r"\${[^{}]+}")
+__expand_python_regexp__ = re.compile(r"\${@.+?}")
+
+def expand(s, d, varname = None):
+    """Variable expansion using the data store"""
+    return d.expand(s, varname)
+
+def expandKeys(alterdata, readdata = None):
+    if readdata is None:
+        readdata = alterdata
+
+    todolist = {}
+    for key in alterdata:
+        if not '${' in key:
+            continue
+
+        ekey = expand(key, readdata)
+        if key == ekey:
+            continue
+        todolist[key] = ekey
+
+    # These two for loops are split for performance to maximise the
+    # usefulness of the expand cache
+    for key in sorted(todolist):
+        ekey = todolist[key]
+        newval = alterdata.getVar(ekey, False)
+        if newval is not None:
+            val = alterdata.getVar(key, False)
+            if val is not None:
+                bb.warn("Variable key %s (%s) replaces original key %s (%s)." % (key, val, ekey, newval))
+        alterdata.renameVar(key, ekey)
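+
+# Sketch of the effect (hypothetical variable names): keys containing variable
+# references are renamed to their expanded form.
+#
+#   d = init()
+#   d.setVar("BAR", "baz")
+#   d.setVar("FOO_${BAR}", "1")
+#   expandKeys(d)
+#   d.getVar("FOO_baz", False)   # -> "1"; the key "FOO_${BAR}" has been renamed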
+
+def inheritFromOS(d, savedenv, permitted):
+    """Inherit variables from the initial environment."""
+    exportlist = bb.utils.preserved_envvars_exported()
+    for s in savedenv.keys():
+        if s in permitted:
+            try:
+                d.setVar(s, savedenv.getVar(s, True), op = 'from env')
+                if s in exportlist:
+                    d.setVarFlag(s, "export", True, op = 'auto env export')
+            except TypeError:
+                pass
+
+def emit_var(var, o=sys.__stdout__, d = init(), all=False):
+    """Emit a variable to be sourced by a shell."""
+    if d.getVarFlag(var, "python", False):
+        return False
+
+    export = d.getVarFlag(var, "export", False)
+    unexport = d.getVarFlag(var, "unexport", False)
+    func = d.getVarFlag(var, "func", False)
+    if not all and not export and not unexport and not func:
+        return False
+
+    try:
+        if all:
+            oval = d.getVar(var, False)
+        val = d.getVar(var, True)
+    except (KeyboardInterrupt, bb.build.FuncFailed):
+        raise
+    except Exception as exc:
+        o.write('# expansion of %s threw %s: %s\n' % (var, exc.__class__.__name__, str(exc)))
+        return False
+
+    if all:
+        d.varhistory.emit(var, oval, val, o, d)
+
+    if (var.find("-") != -1 or var.find(".") != -1 or var.find('{') != -1 or var.find('}') != -1 or var.find('+') != -1) and not all:
+        return False
+
+    varExpanded = d.expand(var)
+
+    if unexport:
+        o.write('unset %s\n' % varExpanded)
+        return False
+
+    if val is None:
+        return False
+
+    val = str(val)
+
+    if varExpanded.startswith("BASH_FUNC_"):
+        varExpanded = varExpanded[10:-2]
+        val = val[3:] # Strip off "() "
+        o.write("%s() %s\n" % (varExpanded, val))
+        o.write("export -f %s\n" % (varExpanded))
+        return True
+
+    if func:
+        # NOTE: should probably check for unbalanced {} within the var
+        val = val.rstrip('\n')
+        o.write("%s() {\n%s\n}\n" % (varExpanded, val))
+        return 1
+
+    if export:
+        o.write('export ')
+
+    # if we're going to output this within doublequotes,
+    # to a shell, we need to escape the quotes in the var
+    alter = re.sub('"', '\\"', val)
+    alter = re.sub('\n', ' \\\n', alter)
+    alter = re.sub('\\$', '\\\\$', alter)
+    o.write('%s="%s"\n' % (varExpanded, alter))
+    return False
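+
+# Rough sketch of the shell output emit_var produces (hypothetical values):
+#
+#   d = init()
+#   d.setVar("CFLAGS", "-O2 -g")
+#   d.setVarFlag("CFLAGS", "export", True)
+#   emit_var("CFLAGS", sys.stdout, d)    # writes: export CFLAGS="-O2 -g"
+#
+#   d.setVar("do_clean", "rm -rf tmp")
+#   d.setVarFlag("do_clean", "func", True)
+#   emit_var("do_clean", sys.stdout, d)  # writes a shell function: do_clean() { ... }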
+
+def emit_env(o=sys.__stdout__, d = init(), all=False):
+    """Emits all items in the data store in a format such that it can be sourced by a shell."""
+
+    isfunc = lambda key: bool(d.getVarFlag(key, "func", False))
+    keys = sorted((key for key in d.keys() if not key.startswith("__")), key=isfunc)
+    grouped = groupby(keys, isfunc)
+    for isfunc, keys in grouped:
+        for key in keys:
+            emit_var(key, o, d, all and not isfunc) and o.write('\n')
+
+def exported_keys(d):
+    return (key for key in d.keys() if not key.startswith('__') and
+                                      d.getVarFlag(key, 'export', False) and
+                                      not d.getVarFlag(key, 'unexport', False))
+
+def exported_vars(d):
+    for key in exported_keys(d):
+        try:
+            value = d.getVar(key, True)
+        except Exception:
+            # Expansion failed; skip this key rather than reusing a stale value
+            continue
+
+        if value is not None:
+            yield key, str(value)
+
+def emit_func(func, o=sys.__stdout__, d = init()):
+    """Emits all items in the data store in a format such that it can be sourced by a shell."""
+
+    keys = (key for key in d.keys() if not key.startswith("__") and not d.getVarFlag(key, "func", False))
+    for key in keys:
+        emit_var(key, o, d, False)
+
+    o.write('\n')
+    emit_var(func, o, d, False) and o.write('\n')
+    newdeps = bb.codeparser.ShellParser(func, logger).parse_shell(d.getVar(func, True))
+    newdeps |= set((d.getVarFlag(func, "vardeps", True) or "").split())
+    seen = set()
+    while newdeps:
+        deps = newdeps
+        seen |= deps
+        newdeps = set()
+        for dep in deps:
+            if d.getVarFlag(dep, "func", False) and not d.getVarFlag(dep, "python", False):
+                emit_var(dep, o, d, False) and o.write('\n')
+                newdeps |= bb.codeparser.ShellParser(dep, logger).parse_shell(d.getVar(dep, True))
+                newdeps |= set((d.getVarFlag(dep, "vardeps", True) or "").split())
+        newdeps -= seen
+
+_functionfmt = """
+def {function}(d):
+{body}"""
+
+def emit_func_python(func, o=sys.__stdout__, d = init()):
+    """Emits all items in the data store in a format such that it can be sourced by a shell."""
+
+    def write_func(func, o, call = False):
+        body = d.getVar(func, False)
+        if not body.startswith("def"):
+            body = _functionfmt.format(function=func, body=body)
+
+        o.write(body.strip() + "\n\n")
+        if call:
+            o.write(func + "(d)" + "\n\n")
+
+    write_func(func, o, True)
+    pp = bb.codeparser.PythonParser(func, logger)
+    pp.parse_python(d.getVar(func, False))
+    newdeps = pp.execs
+    newdeps |= set((d.getVarFlag(func, "vardeps", True) or "").split())
+    seen = set()
+    while newdeps:
+        deps = newdeps
+        seen |= deps
+        newdeps = set()
+        for dep in deps:
+            if d.getVarFlag(dep, "func", False) and d.getVarFlag(dep, "python", False):
+                write_func(dep, o)
+                pp = bb.codeparser.PythonParser(dep, logger)
+                pp.parse_python(d.getVar(dep, False))
+                newdeps |= pp.execs
+                newdeps |= set((d.getVarFlag(dep, "vardeps", True) or "").split())
+        newdeps -= seen
+
+def update_data(d):
+    """Performs final steps upon the datastore, including application of overrides"""
+    d.finalize(parent = True)
+
+def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
+    deps = set()
+    try:
+        if key[-1] == ']':
+            vf = key[:-1].split('[')
+            value = d.getVarFlag(vf[0], vf[1], False)
+            parser = d.expandWithRefs(value, key)
+            deps |= parser.references
+            deps = deps | (keys & parser.execs)
+            return deps, value
+        varflags = d.getVarFlags(key, ["vardeps", "vardepvalue", "vardepsexclude", "vardepvalueexclude", "postfuncs", "prefuncs", "lineno", "filename"]) or {}
+        vardeps = varflags.get("vardeps")
+        value = d.getVar(key, False)
+
+        def handle_contains(value, contains, d):
+            newvalue = ""
+            for k in sorted(contains):
+                l = (d.getVar(k, True) or "").split()
+                for word in sorted(contains[k]):
+                    if word in l:
+                        newvalue += "\n%s{%s} = Set" %  (k, word)
+                    else:
+                        newvalue += "\n%s{%s} = Unset" %  (k, word)
+            if not newvalue:
+                return value
+            if not value:
+                return newvalue
+            return value + newvalue
+
+        if "vardepvalue" in varflags:
+            value = varflags.get("vardepvalue")
+        elif varflags.get("func"):
+            if varflags.get("python"):
+                parser = bb.codeparser.PythonParser(key, logger)
+                if value and "\t" in value:
+                    logger.warn("Variable %s contains tabs, please remove these (%s)" % (key, d.getVar("FILE", True)))
+                parser.parse_python(value, filename=varflags.get("filename"), lineno=varflags.get("lineno"))
+                deps = deps | parser.references
+                deps = deps | (keys & parser.execs)
+                value = handle_contains(value, parser.contains, d)
+            else:
+                parsedvar = d.expandWithRefs(value, key)
+                parser = bb.codeparser.ShellParser(key, logger)
+                parser.parse_shell(parsedvar.value)
+                deps = deps | shelldeps
+                deps = deps | parsedvar.references
+                deps = deps | (keys & parser.execs) | (keys & parsedvar.execs)
+                value = handle_contains(value, parsedvar.contains, d)
+            if vardeps is None:
+                parser.log.flush()
+            if "prefuncs" in varflags:
+                deps = deps | set(varflags["prefuncs"].split())
+            if "postfuncs" in varflags:
+                deps = deps | set(varflags["postfuncs"].split())
+        else:
+            parser = d.expandWithRefs(value, key)
+            deps |= parser.references
+            deps = deps | (keys & parser.execs)
+            value = handle_contains(value, parser.contains, d)
+
+        if "vardepvalueexclude" in varflags:
+            exclude = varflags.get("vardepvalueexclude")
+            for excl in exclude.split('|'):
+                if excl:
+                    value = value.replace(excl, '')
+
+        # Add varflags, assuming an exclusion list is set
+        if varflagsexcl:
+            varfdeps = []
+            for f in varflags:
+                if f not in varflagsexcl:
+                    varfdeps.append('%s[%s]' % (key, f))
+            if varfdeps:
+                deps |= set(varfdeps)
+
+        deps |= set((vardeps or "").split())
+        deps -= set(varflags.get("vardepsexclude", "").split())
+    except Exception as e:
+        bb.warn("Exception during build_dependencies for %s: %s" % (key, e))
+        raise
+    return deps, value
+    #bb.note("Variable %s references %s and calls %s" % (key, str(deps), str(execs)))
+    #d.setVarFlag(key, "vardeps", deps)
+
+def generate_dependencies(d):
+
+    keys = set(key for key in d if not key.startswith("__"))
+    shelldeps = set(key for key in d.getVar("__exportlist", False) if d.getVarFlag(key, "export", False) and not d.getVarFlag(key, "unexport", False))
+    varflagsexcl = d.getVar('BB_SIGNATURE_EXCLUDE_FLAGS', True)
+
+    deps = {}
+    values = {}
+
+    tasklist = d.getVar('__BBTASKS', False) or []
+    for task in tasklist:
+        deps[task], values[task] = build_dependencies(task, keys, shelldeps, varflagsexcl, d)
+        newdeps = deps[task]
+        seen = set()
+        while newdeps:
+            nextdeps = newdeps
+            seen |= nextdeps
+            newdeps = set()
+            for dep in nextdeps:
+                if dep not in deps:
+                    deps[dep], values[dep] = build_dependencies(dep, keys, shelldeps, varflagsexcl, d)
+                newdeps |=  deps[dep]
+            newdeps -= seen
+        #print "For %s: %s" % (task, str(deps[task]))
+    return tasklist, deps, values
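+
+# Shape of the return value (hypothetical task and variable names):
+#
+#   tasklist, deps, values = generate_dependencies(d)
+#   # tasklist -> ["do_fetch", "do_compile", ...]               (from __BBTASKS)
+#   # deps     -> {"do_compile": set(["CC", "CFLAGS"]), "CC": set([...]), ...}
+#   #             direct dependencies for each task and each variable reached from one
+#   # values   -> {"do_compile": "...", ...}                    values used for signatures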
+
+def inherits_class(klass, d):
+    val = d.getVar('__inherit_cache', False) or []
+    needle = os.path.join('classes', '%s.bbclass' % klass)
+    for v in val:
+        if v.endswith(needle):
+            return True
+    return False

+ 969 - 0
bitbake/lib/bb/data_smart.py

@@ -0,0 +1,969 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake Smart Dictionary Implementation
+
+Functions for interacting with the data structure used by the
+BitBake build tools.
+
+"""
+
+# Copyright (C) 2003, 2004  Chris Larson
+# Copyright (C) 2004, 2005  Seb Frankengul
+# Copyright (C) 2005, 2006  Holger Hans Peter Freyther
+# Copyright (C) 2005        Uli Luckas
+# Copyright (C) 2005        ROAD GmbH
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import copy, re, sys, traceback
+from collections import MutableMapping
+import logging
+import hashlib
+import bb, bb.codeparser
+from bb   import utils
+from bb.COW  import COWDictBase
+
+logger = logging.getLogger("BitBake.Data")
+
+__setvar_keyword__ = ["_append", "_prepend", "_remove"]
+__setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend|_remove)(_(?P<add>.*))?$')
+__expand_var_regexp__ = re.compile(r"\${[^{}@\n\t ]+}")
+__expand_python_regexp__ = re.compile(r"\${@.+?}")
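+
+# Illustrative matches (hypothetical variable names):
+#   "RDEPENDS_append_arm" -> base "RDEPENDS", keyword "_append", add "arm"
+#   "FOO_remove"          -> base "FOO", keyword "_remove", add None
+#   __expand_var_regexp__ matches "${TOPDIR}"; __expand_python_regexp__ matches "${@os.getpid()}"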
+
+def infer_caller_details(loginfo, parent = False, varval = True):
+    """Save the caller the trouble of specifying everything."""
+    # Save effort.
+    if 'ignore' in loginfo and loginfo['ignore']:
+        return
+    # If nothing was provided, mark this as possibly unneeded.
+    if not loginfo:
+        loginfo['ignore'] = True
+        return
+    # Infer caller's likely values for variable (var) and value (value), 
+    # to reduce clutter in the rest of the code.
+    above = None
+    def set_above():
+        try:
+            raise Exception
+        except Exception:
+            tb = sys.exc_info()[2]
+            if parent:
+                return tb.tb_frame.f_back.f_back.f_back
+            else:
+                return tb.tb_frame.f_back.f_back
+
+    if varval and ('variable' not in loginfo or 'detail' not in loginfo):
+        if not above:
+            above = set_above()
+        lcls = above.f_locals.items()
+        for k, v in lcls:
+            if k == 'value' and 'detail' not in loginfo:
+                loginfo['detail'] = v
+            if k == 'var' and 'variable' not in loginfo:
+                loginfo['variable'] = v
+    # Infer file/line/function from traceback
+    # Don't use traceback.extract_stack() since it fills the line contents which
+    # we don't need and that hits stat syscalls
+    if 'file' not in loginfo:
+        if not above:
+            above = set_above()
+        f = above.f_back
+        line = f.f_lineno
+        file = f.f_code.co_filename
+        func = f.f_code.co_name
+        loginfo['file'] = file
+        loginfo['line'] = line
+        if 'func' not in loginfo:
+            loginfo['func'] = func
+
+class VariableParse:
+    def __init__(self, varname, d, val = None):
+        self.varname = varname
+        self.d = d
+        self.value = val
+
+        self.references = set()
+        self.execs = set()
+        self.contains = {}
+
+    def var_sub(self, match):
+            key = match.group()[2:-1]
+            if self.varname and key:
+                if self.varname == key:
+                    raise Exception("variable %s references itself!" % self.varname)
+            if key in self.d.expand_cache:
+                varparse = self.d.expand_cache[key]
+                var = varparse.value
+            else:
+                var = self.d.getVarFlag(key, "_content", True)
+            self.references.add(key)
+            if var is not None:
+                return var
+            else:
+                return match.group()
+
+    def python_sub(self, match):
+            code = match.group()[3:-1]
+            codeobj = compile(code.strip(), self.varname or "<expansion>", "eval")
+
+            parser = bb.codeparser.PythonParser(self.varname, logger)
+            parser.parse_python(code)
+            if self.varname:
+                vardeps = self.d.getVarFlag(self.varname, "vardeps", True)
+                if vardeps is None:
+                    parser.log.flush()
+            else:
+                parser.log.flush()
+            self.references |= parser.references
+            self.execs |= parser.execs
+
+            for k in parser.contains:
+                if k not in self.contains:
+                    self.contains[k] = parser.contains[k].copy()
+                else:
+                    self.contains[k].update(parser.contains[k])
+            value = utils.better_eval(codeobj, DataContext(self.d))
+            return str(value)
+
+
+class DataContext(dict):
+    def __init__(self, metadata, **kwargs):
+        self.metadata = metadata
+        dict.__init__(self, **kwargs)
+        self['d'] = metadata
+
+    def __missing__(self, key):
+        value = self.metadata.getVar(key, True)
+        if value is None or self.metadata.getVarFlag(key, 'func', False):
+            raise KeyError(key)
+        else:
+            return value
+
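+# Sketch of how a python expansion sees the datastore (hypothetical values):
+#
+#   d.setVar("PN", "busybox")
+#   d.expand("${@d.getVar('PN', True).upper()}")   # -> "BUSYBOX"; 'd' is provided by DataContext
+#   # bare names inside ${@...} fall back to getVar() lookups via __missing__ above
+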
+class ExpansionError(Exception):
+    def __init__(self, varname, expression, exception):
+        self.expression = expression
+        self.variablename = varname
+        self.exception = exception
+        if varname:
+            if expression:
+                self.msg = "Failure expanding variable %s, expression was %s which triggered exception %s: %s" % (varname, expression, type(exception).__name__, exception)
+            else:
+                self.msg = "Failure expanding variable %s: %s: %s" % (varname, type(exception).__name__, exception)
+        else:
+            self.msg = "Failure expanding expression %s which triggered exception %s: %s" % (expression, type(exception).__name__, exception)
+        Exception.__init__(self, self.msg)
+        self.args = (varname, expression, exception)
+    def __str__(self):
+        return self.msg
+
+class IncludeHistory(object):
+    def __init__(self, parent = None, filename = '[TOP LEVEL]'):
+        self.parent = parent
+        self.filename = filename
+        self.children = []
+        self.current = self
+
+    def copy(self):
+        new = IncludeHistory(self.parent, self.filename)
+        for c in self.children:
+            new.children.append(c)
+        return new
+
+    def include(self, filename):
+        newfile = IncludeHistory(self.current, filename)
+        self.current.children.append(newfile)
+        self.current = newfile
+        return self
+
+    def __enter__(self):
+        pass
+
+    def __exit__(self, a, b, c):
+        if self.current.parent:
+            self.current = self.current.parent
+        else:
+            bb.warn("Include log: Tried to finish '%s' at top level." % filename)
+        return False
+
+    def emit(self, o, level = 0):
+        """Emit an include history file, and its children."""
+        if level:
+            spaces = "  " * (level - 1)
+            o.write("# %s%s" % (spaces, self.filename))
+            if len(self.children) > 0:
+                o.write(" includes:")
+        else:
+            o.write("#\n# INCLUDE HISTORY:\n#")
+        level = level + 1
+        for child in self.children:
+            o.write("\n")
+            child.emit(o, level)
+
+class VariableHistory(object):
+    def __init__(self, dataroot):
+        self.dataroot = dataroot
+        self.variables = COWDictBase.copy()
+
+    def copy(self):
+        new = VariableHistory(self.dataroot)
+        new.variables = self.variables.copy()
+        return new
+
+    def record(self, *kwonly, **loginfo):
+        if not self.dataroot._tracking:
+            return
+        if len(kwonly) > 0:
+            raise TypeError
+        infer_caller_details(loginfo, parent = True)
+        if 'ignore' in loginfo and loginfo['ignore']:
+            return
+        if 'op' not in loginfo or not loginfo['op']:
+            loginfo['op'] = 'set'
+        if 'detail' in loginfo:
+            loginfo['detail'] = str(loginfo['detail'])
+        if 'variable' not in loginfo or 'file' not in loginfo:
+            raise ValueError("record() missing variable or file.")
+        var = loginfo['variable']
+
+        if var not in self.variables:
+            self.variables[var] = []
+        if not isinstance(self.variables[var], list):
+            return
+        if 'nodups' in loginfo and loginfo in self.variables[var]:
+            return
+        self.variables[var].append(loginfo.copy())
+
+    def variable(self, var):
+        if var in self.variables:
+            return self.variables[var]
+        else:
+            return []
+
+    def emit(self, var, oval, val, o, d):
+        history = self.variable(var)
+
+        # Append override history
+        if var in d.overridedata:
+            for (r, override) in d.overridedata[var]:
+                for event in self.variable(r):
+                    loginfo = event.copy()
+                    if 'flag' in loginfo and not loginfo['flag'].startswith("_"):
+                        continue
+                    loginfo['variable'] = var
+                    loginfo['op'] = 'override[%s]:%s' % (override, loginfo['op'])
+                    history.append(loginfo)
+
+        commentVal = re.sub('\n', '\n#', str(oval))
+        if history:
+            if len(history) == 1:
+                o.write("#\n# $%s\n" % var)
+            else:
+                o.write("#\n# $%s [%d operations]\n" % (var, len(history)))
+            for event in history:
+                # o.write("# %s\n" % str(event))
+                if 'func' in event:
+                    # If we have a function listed, this is internal
+                    # code, not an operation in a config file, and the
+                    # full path is distracting.
+                    event['file'] = re.sub('.*/', '', event['file'])
+                    display_func = ' [%s]' % event['func']
+                else:
+                    display_func = ''
+                if 'flag' in event:
+                    flag = '[%s] ' % (event['flag'])
+                else:
+                    flag = ''
+                o.write("#   %s %s:%s%s\n#     %s\"%s\"\n" % (event['op'], event['file'], event['line'], display_func, flag, re.sub('\n', '\n#     ', event['detail'])))
+            if len(history) > 1:
+                o.write("# pre-expansion value:\n")
+                o.write('#   "%s"\n' % (commentVal))
+        else:
+            o.write("#\n# $%s\n#   [no history recorded]\n#\n" % var)
+            o.write('#   "%s"\n' % (commentVal))
+
+    def get_variable_files(self, var):
+        """Get the files where operations are made on a variable"""
+        var_history = self.variable(var)
+        files = []
+        for event in var_history:
+            files.append(event['file'])
+        return files
+
+    def get_variable_lines(self, var, f):
+        """Get the line where a operation is made on a variable in file f"""
+        var_history = self.variable(var)
+        lines = []
+        for event in var_history:
+            if f == event['file']:
+                line = event['line']
+                lines.append(line)
+        return lines
+
+    def get_variable_items_files(self, var, d):
+        """
+        Use variable history to map items added to a list variable and
+        the files in which they were added.
+        """
+        history = self.variable(var)
+        finalitems = (d.getVar(var, True) or '').split()
+        filemap = {}
+        isset = False
+        for event in history:
+            if 'flag' in event:
+                continue
+            if event['op'] == '_remove':
+                continue
+            if isset and event['op'] == 'set?':
+                continue
+            isset = True
+            items = d.expand(event['detail']).split()
+            for item in items:
+                # This is a little crude but is belt-and-braces to avoid us
+                # having to handle every possible operation type specifically
+                if item in finalitems and not item in filemap:
+                    filemap[item] = event['file']
+        return filemap
+
+    def del_var_history(self, var, f=None, line=None):
+        """If file f and line are not given, the entire history of var is deleted"""
+        if var in self.variables:
+            if f and line:
+                self.variables[var] = [x for x in self.variables[var] if x['file'] != f or x['line'] != line]
+            else:
+                self.variables[var] = []
+
+class DataSmart(MutableMapping):
+    def __init__(self):
+        self.dict = {}
+
+        self.inchistory = IncludeHistory()
+        self.varhistory = VariableHistory(self)
+        self._tracking = False
+
+        self.expand_cache = {}
+
+        # cookie monster tribute
+        # Need to be careful about writes to overridedata as
+        # it's only a shallow copy and could influence other data store
+        # copies!
+        self.overridedata = {}
+        self.overrides = None
+        self.overridevars = set(["OVERRIDES", "FILE"])
+        self.inoverride = False
+
+    def enableTracking(self):
+        self._tracking = True
+
+    def disableTracking(self):
+        self._tracking = False
+
+    def expandWithRefs(self, s, varname):
+
+        if not isinstance(s, basestring): # sanity check
+            return VariableParse(varname, self, s)
+
+        if varname and varname in self.expand_cache:
+            return self.expand_cache[varname]
+
+        varparse = VariableParse(varname, self)
+
+        while s.find('${') != -1:
+            olds = s
+            try:
+                s = __expand_var_regexp__.sub(varparse.var_sub, s)
+                try:
+                    s = __expand_python_regexp__.sub(varparse.python_sub, s)
+                except SyntaxError as e:
+                    # Likely unmatched brackets, just don't expand the expression
+                    if e.msg != "EOL while scanning string literal":
+                        raise
+                if s == olds:
+                    break
+            except ExpansionError:
+                raise
+            except bb.parse.SkipRecipe:
+                raise
+            except Exception as exc:
+                exc_class, exc, tb = sys.exc_info()
+                raise ExpansionError, ExpansionError(varname, s, exc), tb
+
+        varparse.value = s
+
+        if varname:
+            self.expand_cache[varname] = varparse
+
+        return varparse
+
+    def expand(self, s, varname = None):
+        return self.expandWithRefs(s, varname).value
+
+    def finalize(self, parent = False):
+        return
+
+    def internal_finalize(self, parent = False):
+        """Performs final steps upon the datastore, including application of overrides"""
+        self.overrides = None
+
+    def need_overrides(self):
+        if self.overrides is not None:
+            return
+        if self.inoverride:
+            return
+        for count in range(5):
+            self.inoverride = True
+            # Can end up here recursively so setup dummy values
+            self.overrides = []
+            self.overridesset = set()
+            self.overrides = (self.getVar("OVERRIDES", True) or "").split(":") or []
+            self.overridesset = set(self.overrides)
+            self.inoverride = False
+            self.expand_cache = {}
+            newoverrides = (self.getVar("OVERRIDES", True) or "").split(":") or []
+            if newoverrides == self.overrides:
+                break
+            self.overrides = newoverrides
+            self.overridesset = set(self.overrides)
+        else:
+            bb.fatal("Overrides could not be expanded into a stable state after 5 iterations, overrides must be being referenced by other overridden variables in some recursive fashion. Please provide your configuration to bitbake-devel so we can laugh, er, I mean try and understand how to make it work.")
+
+    def initVar(self, var):
+        self.expand_cache = {}
+        if not var in self.dict:
+            self.dict[var] = {}
+
+    def _findVar(self, var):
+        dest = self.dict
+        while dest:
+            if var in dest:
+                return dest[var]
+
+            if "_data" not in dest:
+                break
+            dest = dest["_data"]
+
+    def _makeShadowCopy(self, var):
+        if var in self.dict:
+            return
+
+        local_var = self._findVar(var)
+
+        if local_var:
+            self.dict[var] = copy.copy(local_var)
+        else:
+            self.initVar(var)
+
+
+    def setVar(self, var, value, **loginfo):
+        #print("var=" + str(var) + "  val=" + str(value))
+        parsing=False
+        if 'parsing' in loginfo:
+            parsing=True
+
+        if 'op' not in loginfo:
+            loginfo['op'] = "set"
+        self.expand_cache = {}
+        match  = __setvar_regexp__.match(var)
+        if match and match.group("keyword") in __setvar_keyword__:
+            base = match.group('base')
+            keyword = match.group("keyword")
+            override = match.group('add')
+            l = self.getVarFlag(base, keyword, False) or []
+            l.append([value, override])
+            self.setVarFlag(base, keyword, l, ignore=True)
+            # And cause that to be recorded:
+            loginfo['detail'] = value
+            loginfo['variable'] = base
+            if override:
+                loginfo['op'] = '%s[%s]' % (keyword, override)
+            else:
+                loginfo['op'] = keyword
+            self.varhistory.record(**loginfo)
+            # todo make sure keyword is not __doc__ or __module__
+            # pay the cookie monster
+
+            # more cookies for the cookie monster
+            if '_' in var:
+                self._setvar_update_overrides(base, **loginfo)
+
+            if base in self.overridevars:
+                self._setvar_update_overridevars(var, value)
+            return
+
+        if not var in self.dict:
+            self._makeShadowCopy(var)
+
+        if not parsing:
+            if "_append" in self.dict[var]:
+                del self.dict[var]["_append"]
+            if "_prepend" in self.dict[var]:
+                del self.dict[var]["_prepend"]
+            if var in self.overridedata:
+                active = []
+                self.need_overrides()
+                for (r, o) in self.overridedata[var]:
+                    if o in self.overridesset:
+                        active.append(r)
+                    elif "_" in o:
+                        if set(o.split("_")).issubset(self.overridesset):
+                            active.append(r)
+                for a in active:
+                    self.delVar(a)
+                del self.overridedata[var]
+
+        # more cookies for the cookie monster
+        if '_' in var:
+            self._setvar_update_overrides(var, **loginfo)
+
+        # setting var
+        self.dict[var]["_content"] = value
+        self.varhistory.record(**loginfo)
+
+        if var in self.overridevars:
+            self._setvar_update_overridevars(var, value)
+
+    def _setvar_update_overridevars(self, var, value):
+        vardata = self.expandWithRefs(value, var)
+        new = vardata.references
+        new.update(vardata.contains.keys())
+        while not new.issubset(self.overridevars):
+            nextnew = set()
+            self.overridevars.update(new)
+            for i in new:
+                vardata = self.expandWithRefs(self.getVar(i, True), i)
+                nextnew.update(vardata.references)
+                nextnew.update(vardata.contains.keys())
+            new = nextnew
+        self.internal_finalize(True)
+
+    def _setvar_update_overrides(self, var, **loginfo):
+        # aka pay the cookie monster
+        override = var[var.rfind('_')+1:]
+        shortvar = var[:var.rfind('_')]
+        while override and override.islower():
+            if shortvar not in self.overridedata:
+                self.overridedata[shortvar] = []
+            if [var, override] not in self.overridedata[shortvar]:
+                # Force CoW by recreating the list first
+                self.overridedata[shortvar] = list(self.overridedata[shortvar])
+                self.overridedata[shortvar].append([var, override])
+            override = None
+            if "_" in shortvar:
+                override = var[shortvar.rfind('_')+1:]
+                shortvar = var[:shortvar.rfind('_')]
+                if len(shortvar) == 0:
+                    override = None
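+
+    # Bookkeeping sketch (hypothetical names): setting "FOO_arm" records
+    # ["FOO_arm", "arm"] under overridedata["FOO"]; "FOO_libc_arm" is recorded
+    # both under "FOO_libc" (override "arm") and under "FOO" (override "libc_arm").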
+
+    def getVar(self, var, expand, noweakdefault=False, parsing=False):
+        return self.getVarFlag(var, "_content", expand, noweakdefault, parsing)
+
+    def renameVar(self, key, newkey, **loginfo):
+        """
+        Rename the variable key to newkey
+        """
+        val = self.getVar(key, 0, parsing=True)
+        if val is not None:
+            loginfo['variable'] = newkey
+            loginfo['op'] = 'rename from %s' % key
+            loginfo['detail'] = val
+            self.varhistory.record(**loginfo)
+            self.setVar(newkey, val, ignore=True, parsing=True)
+
+        for i in (__setvar_keyword__):
+            src = self.getVarFlag(key, i, False)
+            if src is None:
+                continue
+
+            dest = self.getVarFlag(newkey, i, False) or []
+            dest.extend(src)
+            self.setVarFlag(newkey, i, dest, ignore=True)
+
+        if key in self.overridedata:
+            self.overridedata[newkey] = []
+            for (v, o) in self.overridedata[key]:
+                self.overridedata[newkey].append([v.replace(key, newkey), o])
+                self.renameVar(v, v.replace(key, newkey))
+
+        if '_' in newkey and val is None:
+            self._setvar_update_overrides(newkey, **loginfo)
+
+        loginfo['variable'] = key
+        loginfo['op'] = 'rename (to)'
+        loginfo['detail'] = newkey
+        self.varhistory.record(**loginfo)
+        self.delVar(key, ignore=True)
+
+    def appendVar(self, var, value, **loginfo):
+        loginfo['op'] = 'append'
+        self.varhistory.record(**loginfo)
+        self.setVar(var + "_append", value, ignore=True, parsing=True)
+
+    def prependVar(self, var, value, **loginfo):
+        loginfo['op'] = 'prepend'
+        self.varhistory.record(**loginfo)
+        self.setVar(var + "_prepend", value, ignore=True, parsing=True)
+
+    def delVar(self, var, **loginfo):
+        loginfo['detail'] = ""
+        loginfo['op'] = 'del'
+        self.varhistory.record(**loginfo)
+        self.expand_cache = {}
+        self.dict[var] = {}
+        if var in self.overridedata:
+            del self.overridedata[var]
+        if '_' in var:
+            override = var[var.rfind('_')+1:]
+            shortvar = var[:var.rfind('_')]
+            while override and override.islower():
+                try:
+                    if shortvar in self.overridedata:
+                        # Force CoW by recreating the list first
+                        self.overridedata[shortvar] = list(self.overridedata[shortvar])
+                        self.overridedata[shortvar].remove([var, override])
+                except ValueError as e:
+                    pass
+                override = None
+                if "_" in shortvar:
+                    override = var[shortvar.rfind('_')+1:]
+                    shortvar = var[:shortvar.rfind('_')]
+                    if len(shortvar) == 0:
+                         override = None
+
+    def setVarFlag(self, var, flag, value, **loginfo):
+        self.expand_cache = {}
+        if 'op' not in loginfo:
+            loginfo['op'] = "set"
+        loginfo['flag'] = flag
+        self.varhistory.record(**loginfo)
+        if not var in self.dict:
+            self._makeShadowCopy(var)
+        self.dict[var][flag] = value
+
+        if flag == "_defaultval" and '_' in var:
+            self._setvar_update_overrides(var, **loginfo)
+        if flag == "_defaultval" and var in self.overridevars:
+            self._setvar_update_overridevars(var, value)
+
+        if flag == "unexport" or flag == "export":
+            if not "__exportlist" in self.dict:
+                self._makeShadowCopy("__exportlist")
+            if not "_content" in self.dict["__exportlist"]:
+                self.dict["__exportlist"]["_content"] = set()
+            self.dict["__exportlist"]["_content"].add(var)
+
+    def getVarFlag(self, var, flag, expand, noweakdefault=False, parsing=False):
+        local_var = self._findVar(var)
+        value = None
+        if flag == "_content" and var in self.overridedata and not parsing:
+            match = False
+            active = {}
+            self.need_overrides()
+            for (r, o) in self.overridedata[var]:
+                # What about double overrides both with "_" in the name?
+                if o in self.overridesset:
+                    active[o] = r
+                elif "_" in o:
+                    if set(o.split("_")).issubset(self.overridesset):
+                        active[o] = r
+
+            mod = True
+            while mod:
+                mod = False
+                for o in self.overrides:
+                    for a in active.copy():
+                        if a.endswith("_" + o):
+                            t = active[a]
+                            del active[a]
+                            active[a.replace("_" + o, "")] = t
+                            mod = True
+                        elif a == o:
+                            match = active[a]
+                            del active[a]
+            if match:
+                value = self.getVar(match, False)
+
+        if local_var is not None and value is None:
+            if flag in local_var:
+                value = copy.copy(local_var[flag])
+            elif flag == "_content" and "_defaultval" in local_var and not noweakdefault:
+                value = copy.copy(local_var["_defaultval"])
+
+
+        if flag == "_content" and local_var is not None and "_append" in local_var and not parsing:
+            if not value:
+                value = ""
+            self.need_overrides()
+            for (r, o) in local_var["_append"]:
+                match = True
+                if o:
+                    for o2 in o.split("_"):
+                        if not o2 in self.overrides:
+                            match = False                            
+                if match:
+                    value = value + r
+
+        if flag == "_content" and local_var is not None and "_prepend" in local_var and not parsing:
+            if not value:
+                value = ""
+            self.need_overrides()
+            for (r, o) in local_var["_prepend"]:
+
+                match = True
+                if o:
+                    for o2 in o.split("_"):
+                        if not o2 in self.overrides:
+                            match = False                            
+                if match:
+                    value = r + value
+
+        if expand and value:
+            # Only getvar (flag == _content) hits the expand cache
+            cachename = None
+            if flag == "_content":
+                cachename = var
+            else:
+                cachename = var + "[" + flag + "]"
+            value = self.expand(value, cachename)
+
+        if value and flag == "_content" and local_var is not None and "_remove" in local_var:
+            removes = []
+            self.need_overrides()
+            for (r, o) in local_var["_remove"]:
+                match = True
+                if o:
+                    for o2 in o.split("_"):
+                        if not o2 in self.overrides:
+                            match = False                            
+                if match:
+                    removes.extend(self.expand(r).split())
+
+            filtered = filter(lambda v: v not in removes,
+                              value.split())
+            value = " ".join(filtered)
+            if expand and var in self.expand_cache:
+                # We need to ensure the expand cache has the correct value;
+                # we can only get here when flag == "_content"
+                self.expand_cache[var].value = value
+        return value
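+
+    # Resolution sketch for the override machinery above (hypothetical values):
+    #
+    #   d.setVar("OVERRIDES", "arm:local")
+    #   d.setVar("FOO", "a")
+    #   d.setVar("FOO_append", " b")
+    #   d.setVar("FOO_arm", "c")
+    #   d.getVar("FOO", True)   # -> "c b": the active "arm" override wins, then _append applies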
+
+    def delVarFlag(self, var, flag, **loginfo):
+        self.expand_cache = {}
+        local_var = self._findVar(var)
+        if not local_var:
+            return
+        if not var in self.dict:
+            self._makeShadowCopy(var)
+
+        if var in self.dict and flag in self.dict[var]:
+            loginfo['detail'] = ""
+            loginfo['op'] = 'delFlag'
+            loginfo['flag'] = flag
+            self.varhistory.record(**loginfo)
+
+            del self.dict[var][flag]
+
+    def appendVarFlag(self, var, flag, value, **loginfo):
+        loginfo['op'] = 'append'
+        loginfo['flag'] = flag
+        self.varhistory.record(**loginfo)
+        newvalue = (self.getVarFlag(var, flag, False) or "") + value
+        self.setVarFlag(var, flag, newvalue, ignore=True)
+
+    def prependVarFlag(self, var, flag, value, **loginfo):
+        loginfo['op'] = 'prepend'
+        loginfo['flag'] = flag
+        self.varhistory.record(**loginfo)
+        newvalue = value + (self.getVarFlag(var, flag, False) or "")
+        self.setVarFlag(var, flag, newvalue, ignore=True)
+
+    def setVarFlags(self, var, flags, **loginfo):
+        self.expand_cache = {}
+        infer_caller_details(loginfo)
+        if not var in self.dict:
+            self._makeShadowCopy(var)
+
+        for i in flags:
+            if i == "_content":
+                continue
+            loginfo['flag'] = i
+            loginfo['detail'] = flags[i]
+            self.varhistory.record(**loginfo)
+            self.dict[var][i] = flags[i]
+
+    def getVarFlags(self, var, expand = False, internalflags=False):
+        local_var = self._findVar(var)
+        flags = {}
+
+        if local_var:
+            for i in local_var:
+                if i.startswith("_") and not internalflags:
+                    continue
+                flags[i] = local_var[i]
+                if expand and i in expand:
+                    flags[i] = self.expand(flags[i], var + "[" + i + "]")
+        if len(flags) == 0:
+            return None
+        return flags
+
+
+    def delVarFlags(self, var, **loginfo):
+        self.expand_cache = {}
+        if not var in self.dict:
+            self._makeShadowCopy(var)
+
+        if var in self.dict:
+            content = None
+
+            loginfo['op'] = 'delete flags'
+            self.varhistory.record(**loginfo)
+
+            # try to save the content
+            if "_content" in self.dict[var]:
+                content  = self.dict[var]["_content"]
+                self.dict[var]            = {}
+                self.dict[var]["_content"] = content
+            else:
+                del self.dict[var]
+
+    def createCopy(self):
+        """
+        Create a copy of self by setting _data to self
+        """
+        # we really want this to be a DataSmart...
+        data = DataSmart()
+        data.dict["_data"] = self.dict
+        data.varhistory = self.varhistory.copy()
+        data.varhistory.dataroot = data
+        data.inchistory = self.inchistory.copy()
+
+        data._tracking = self._tracking
+
+        data.overrides = None
+        data.overridevars = copy.copy(self.overridevars)
+        # Should really be a deepcopy but has heavy overhead.
+        # Instead, we're careful with writes.
+        data.overridedata = copy.copy(self.overridedata)
+
+        return data
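+
+    # Copy sketch: lookups in the copy fall through to the parent via dict["_data"].
+    #
+    #   d2 = d.createCopy()
+    #   d2.dict["_data"] is d.dict   # -> True; unmodified keys are found via _findVar()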
+
+    def expandVarref(self, variable, parents=False):
+        """Find all references to variable in the data and expand it
+           in place, optionally descending to parent datastores."""
+
+        if parents:
+            keys = iter(self)
+        else:
+            keys = self.localkeys()
+
+        ref = '${%s}' % variable
+        value = self.getVar(variable, False)
+        for key in keys:
+            referrervalue = self.getVar(key, False)
+            if referrervalue and ref in referrervalue:
+                self.setVar(key, referrervalue.replace(ref, value))
+
+    def localkeys(self):
+        for key in self.dict:
+            if key != '_data':
+                yield key
+
+    def __iter__(self):
+        deleted = set()
+        overrides = set()
+        def keylist(d):        
+            klist = set()
+            for key in d:
+                if key == "_data":
+                    continue
+                if key in deleted:
+                    continue
+                if key in overrides:
+                    continue
+                if not d[key]:
+                    deleted.add(key)
+                    continue
+                klist.add(key)
+
+            if "_data" in d:
+                klist |= keylist(d["_data"])
+
+            return klist
+
+        self.need_overrides()
+        for var in self.overridedata:
+            for (r, o) in self.overridedata[var]:
+                if o in self.overridesset:
+                    overrides.add(var)
+                elif "_" in o:
+                    if set(o.split("_")).issubset(self.overridesset):
+                        overrides.add(var)
+
+        for k in keylist(self.dict):
+            yield k
+
+        for k in overrides:
+            yield k
+
+    def __len__(self):
+        return len(frozenset(self))
+
+    def __getitem__(self, item):
+        value = self.getVar(item, False)
+        if value is None:
+            raise KeyError(item)
+        else:
+            return value
+
+    def __setitem__(self, var, value):
+        self.setVar(var, value)
+
+    def __delitem__(self, var):
+        self.delVar(var)
+
+    def get_hash(self):
+        data = {}
+        d = self.createCopy()
+        bb.data.expandKeys(d)
+        bb.data.update_data(d)
+
+        config_whitelist = set((d.getVar("BB_HASHCONFIG_WHITELIST", True) or "").split())
+        keys = set(key for key in iter(d) if not key.startswith("__"))
+        for key in keys:
+            if key in config_whitelist:
+                continue
+
+            value = d.getVar(key, False) or ""
+            data.update({key:value})
+
+            varflags = d.getVarFlags(key, internalflags = True)
+            if not varflags:
+                continue
+            for f in varflags:
+                if f == "_content":
+                    continue
+                data.update({'%s[%s]' % (key, f):varflags[f]})
+
+        for key in ["__BBTASKS", "__BBANONFUNCS", "__BBHANDLERS"]:
+            bb_list = d.getVar(key, False) or []
+            bb_list.sort()
+            data.update({key:str(bb_list)})
+
+            if key == "__BBANONFUNCS":
+                for i in bb_list:
+                    value = d.getVar(i, False) or ""
+                    data.update({i:value})
+
+        data_str = str([(k, data[k]) for k in sorted(data.keys())])
+        return hashlib.md5(data_str).hexdigest()

+ 679 - 0
bitbake/lib/bb/event.py

@@ -0,0 +1,679 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Event' implementation
+
+Classes and functions for manipulating 'events' in the
+BitBake build tools.
+"""
+
+# Copyright (C) 2003, 2004  Chris Larson
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os, sys
+import warnings
+try:
+    import cPickle as pickle
+except ImportError:
+    import pickle
+import logging
+import atexit
+import traceback
+import ast
+import bb.utils
+import bb.compat
+import bb.exceptions
+
+# This is the pid for which we should generate the event. This is set when
+# the runqueue forks off.
+worker_pid = 0
+worker_fire = None
+
+logger = logging.getLogger('BitBake.Event')
+
+class Event(object):
+    """Base class for events"""
+
+    def __init__(self):
+        self.pid = worker_pid
+
+Registered        = 10
+AlreadyRegistered = 14
+
+def get_class_handlers():
+    return _handlers
+
+def set_class_handlers(h):
+    global _handlers
+    _handlers = h
+
+def clean_class_handlers():
+    return bb.compat.OrderedDict()
+
+# Internal
+_handlers = clean_class_handlers()
+_ui_handlers = {}
+_ui_logfilters = {}
+_ui_handler_seq = 0
+_event_handler_map = {}
+_catchall_handlers = {}
+_eventfilter = None
+_uiready = False
+
+def execute_handler(name, handler, event, d):
+    event.data = d
+    addedd = False
+    if 'd' not in __builtins__:
+        __builtins__['d'] = d
+        addedd = True
+    try:
+        ret = handler(event)
+    except (bb.parse.SkipRecipe, bb.BBHandledException):
+        raise
+    except Exception:
+        etype, value, tb = sys.exc_info()
+        logger.error("Execution of event handler '%s' failed" % name,
+                        exc_info=(etype, value, tb.tb_next))
+        raise
+    except SystemExit as exc:
+        if exc.code != 0:
+            logger.error("Execution of event handler '%s' failed" % name)
+        raise
+    finally:
+        del event.data
+        if addedd:
+            del __builtins__['d']
+
+def fire_class_handlers(event, d):
+    if isinstance(event, logging.LogRecord):
+        return
+
+    eid = str(event.__class__)[8:-2]
+    evt_hmap = _event_handler_map.get(eid, {})
+    for name, handler in _handlers.iteritems():
+        if name in _catchall_handlers or name in evt_hmap:
+            if _eventfilter:
+                if not _eventfilter(name, handler, event, d):
+                    continue
+            execute_handler(name, handler, event, d)
+
+ui_queue = []
+@atexit.register
+def print_ui_queue():
+    """If we're exiting before a UI has been spawned, display any queued
+    LogRecords to the console."""
+    logger = logging.getLogger("BitBake")
+    if not _uiready:
+        from bb.msg import BBLogFormatter
+        console = logging.StreamHandler(sys.stdout)
+        console.setFormatter(BBLogFormatter("%(levelname)s: %(message)s"))
+        logger.handlers = [console]
+
+        # First check to see if we have any proper messages
+        msgprint = False
+        for event in ui_queue:
+            if isinstance(event, logging.LogRecord):
+                if event.levelno > logging.DEBUG:
+                    logger.handle(event)
+                    msgprint = True
+        if msgprint:
+            return
+
+        # Nope, so just print all of the messages we have (including debug messages)
+        for event in ui_queue:
+            if isinstance(event, logging.LogRecord):
+                logger.handle(event)
+
+def fire_ui_handlers(event, d):
+    if not _uiready:
+        # No UI handlers registered yet, queue up the messages
+        ui_queue.append(event)
+        return
+
+    errors = []
+    for h in _ui_handlers:
+        #print "Sending event %s" % event
+        try:
+            if not _ui_logfilters[h].filter(event):
+                continue
+            # We use pickle here since it better handles object instances
+            # which xmlrpc's marshaller does not. Events *must* be serializable
+            # by pickle.
+            if hasattr(_ui_handlers[h].event, "sendpickle"):
+                _ui_handlers[h].event.sendpickle((pickle.dumps(event)))
+            else:
+                _ui_handlers[h].event.send(event)
+        except:
+            errors.append(h)
+    for h in errors:
+        del _ui_handlers[h]
+
+def fire(event, d):
+    """Fire off an Event"""
+
+    # We can fire class handlers in the worker process context and this is
+    # desired so they get the task based datastore.
+    # UI handlers need to be fired in the server context so we defer this. They
+    # don't have a datastore so the datastore context isn't a problem.
+
+    fire_class_handlers(event, d)
+    if worker_fire:
+        worker_fire(event, d)
+    else:
+        fire_ui_handlers(event, d)
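+
+# Typical call-site sketch (illustrative):
+#
+#   bb.event.fire(bb.event.ConfigParsed(), d)
+#
+# Class handlers run in the calling (worker) context; UI delivery is deferred
+# through worker_fire when running in a worker, otherwise sent directly.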
+
+def fire_from_worker(event, d):
+    fire_ui_handlers(event, d)
+
+noop = lambda _: None
+def register(name, handler, mask=None, filename=None, lineno=None):
+    """Register an Event handler"""
+
+    # already registered
+    if name in _handlers:
+        return AlreadyRegistered
+
+    if handler is not None:
+        # handle string containing python code
+        if isinstance(handler, basestring):
+            tmp = "def %s(e):\n%s" % (name, handler)
+            try:
+                code = bb.methodpool.compile_cache(tmp)
+                if not code:
+                    if filename is None:
+                        filename = "%s(e)" % name
+                    code = compile(tmp, filename, "exec", ast.PyCF_ONLY_AST)
+                    if lineno is not None:
+                        ast.increment_lineno(code, lineno-1)
+                    code = compile(code, filename, "exec")
+                    bb.methodpool.compile_cache_add(tmp, code)
+            except SyntaxError:
+                logger.error("Unable to register event handler '%s':\n%s", name,
+                             ''.join(traceback.format_exc(limit=0)))
+                _handlers[name] = noop
+                return
+            env = {}
+            bb.utils.better_exec(code, env)
+            func = bb.utils.better_eval(name, env)
+            _handlers[name] = func
+        else:
+            _handlers[name] = handler
+
+        if not mask or '*' in mask:
+            _catchall_handlers[name] = True
+        else:
+            for m in mask:
+                if _event_handler_map.get(m, None) is None:
+                    _event_handler_map[m] = {}
+                _event_handler_map[m][name] = True
+
+        return Registered
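+
+# Registration sketch (hypothetical handler): the mask restricts delivery to the
+# named event classes.
+#
+#   def on_task_started(e):
+#       print("task started in pid %s" % e.pid)
+#   bb.event.register("on_task_started", on_task_started, mask=["bb.build.TaskStarted"])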
+
+def remove(name, handler):
+    """Remove an Event handler"""
+    _handlers.pop(name)
+
+def set_eventfilter(func):
+    global _eventfilter
+    _eventfilter = func
+
+def register_UIHhandler(handler, mainui=False):
+    if mainui:
+        global _uiready
+        _uiready = True
+    bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1
+    _ui_handlers[_ui_handler_seq] = handler
+    level, debug_domains = bb.msg.constructLogOptions()
+    _ui_logfilters[_ui_handler_seq] = UIEventFilter(level, debug_domains)
+    return _ui_handler_seq
+
+def unregister_UIHhandler(handlerNum):
+    if handlerNum in _ui_handlers:
+        del _ui_handlers[handlerNum]
+    return
+
+# Class to allow filtering of events and specific filtering of LogRecords *before* we put them over the IPC
+class UIEventFilter(object):
+    def __init__(self, level, debug_domains):
+        self.update(None, level, debug_domains)
+
+    def update(self, eventmask, level, debug_domains):
+        self.eventmask = eventmask
+        self.stdlevel = level
+        self.debug_domains = debug_domains
+
+    def filter(self, event):
+        if isinstance(event, logging.LogRecord):
+            if event.levelno >= self.stdlevel:
+                return True
+            if event.name in self.debug_domains and event.levelno >= self.debug_domains[event.name]:
+                return True
+            return False
+        eid = str(event.__class__)[8:-2]
+        if self.eventmask and eid not in self.eventmask:
+            return False
+        return True
+
+def set_UIHmask(handlerNum, level, debug_domains, mask):
+    if not handlerNum in _ui_handlers:
+        return False
+    if '*' in mask:
+        _ui_logfilters[handlerNum].update(None, level, debug_domains)
+    else:
+        _ui_logfilters[handlerNum].update(mask, level, debug_domains)
+    return True
+
+def getName(e):
+    """Returns the name of a class or class instance"""
+    if getattr(e, "__name__", None) is None:
+        return e.__class__.__name__
+    else:
+        return e.__name__
+
+class OperationStarted(Event):
+    """An operation has begun"""
+    def __init__(self, msg = "Operation Started"):
+        Event.__init__(self)
+        self.msg = msg
+
+class OperationCompleted(Event):
+    """An operation has completed"""
+    def __init__(self, total, msg = "Operation Completed"):
+        Event.__init__(self)
+        self.total = total
+        self.msg = msg
+
+class OperationProgress(Event):
+    """An operation is in progress"""
+    def __init__(self, current, total, msg = "Operation in Progress"):
+        Event.__init__(self)
+        self.current = current
+        self.total = total
+        self.msg = msg + ": %s/%s" % (current, total)
+
+class ConfigParsed(Event):
+    """Configuration Parsing Complete"""
+
+class RecipeEvent(Event):
+    def __init__(self, fn):
+        self.fn = fn
+        Event.__init__(self)
+
+class RecipePreFinalise(RecipeEvent):
+    """ Recipe Parsing Complete but not yet finialised"""
+
+class RecipeParsed(RecipeEvent):
+    """ Recipe Parsing Complete """
+
+class StampUpdate(Event):
+    """Trigger for any adjustment of the stamp files to happen"""
+
+    def __init__(self, targets, stampfns):
+        self._targets = targets
+        self._stampfns = stampfns
+        Event.__init__(self)
+
+    def getStampPrefix(self):
+        return self._stampfns
+
+    def getTargets(self):
+        return self._targets
+
+    stampPrefix = property(getStampPrefix)
+    targets = property(getTargets)
+
+class BuildBase(Event):
+    """Base class for bbmake run events"""
+
+    def __init__(self, n, p, failures = 0):
+        self._name = n
+        self._pkgs = p
+        Event.__init__(self)
+        self._failures = failures
+
+    def getPkgs(self):
+        return self._pkgs
+
+    def setPkgs(self, pkgs):
+        self._pkgs = pkgs
+
+    def getName(self):
+        return self._name
+
+    def setName(self, name):
+        self._name = name
+
+    def getCfg(self):
+        return self.data
+
+    def setCfg(self, cfg):
+        self.data = cfg
+
+    def getFailures(self):
+        """
+        Return the number of failed packages
+        """
+        return self._failures
+
+    pkgs = property(getPkgs, setPkgs, None, "pkgs property")
+    name = property(getName, setName, None, "name property")
+    cfg = property(getCfg, setCfg, None, "cfg property")
+
+class BuildStarted(BuildBase, OperationStarted):
+    """bbmake build run started"""
+    def __init__(self, n, p, failures = 0):
+        OperationStarted.__init__(self, "Building Started")
+        BuildBase.__init__(self, n, p, failures)
+
+class BuildCompleted(BuildBase, OperationCompleted):
+    """bbmake build run completed"""
+    def __init__(self, total, n, p, failures=0, interrupted=0):
+        if not failures:
+            OperationCompleted.__init__(self, total, "Building Succeeded")
+        else:
+            OperationCompleted.__init__(self, total, "Building Failed")
+        self._interrupted = interrupted
+        BuildBase.__init__(self, n, p, failures)
+
+class DiskFull(Event):
+    """Disk full case build aborted"""
+    def __init__(self, dev, type, freespace, mountpoint):
+        Event.__init__(self)
+        self._dev = dev
+        self._type = type
+        self._free = freespace
+        self._mountpoint = mountpoint
+
+class NoProvider(Event):
+    """No Provider for an Event"""
+
+    def __init__(self, item, runtime=False, dependees=None, reasons=None, close_matches=None):
+        Event.__init__(self)
+        self._item = item
+        self._runtime = runtime
+        self._dependees = dependees
+        self._reasons = reasons
+        self._close_matches = close_matches
+
+    def getItem(self):
+        return self._item
+
+    def isRuntime(self):
+        return self._runtime
+
+class MultipleProviders(Event):
+    """Multiple Providers"""
+
+    def __init__(self, item, candidates, runtime = False):
+        Event.__init__(self)
+        self._item = item
+        self._candidates = candidates
+        self._is_runtime = runtime
+
+    def isRuntime(self):
+        """
+        Is this a runtime issue?
+        """
+        return self._is_runtime
+
+    def getItem(self):
+        """
+        The name of the item to be built
+        """
+        return self._item
+
+    def getCandidates(self):
+        """
+        Get the possible Candidates for a PROVIDER.
+        """
+        return self._candidates
+
+class ParseStarted(OperationStarted):
+    """Recipe parsing for the runqueue has begun"""
+    def __init__(self, total):
+        OperationStarted.__init__(self, "Recipe parsing Started")
+        self.total = total
+
+class ParseCompleted(OperationCompleted):
+    """Recipe parsing for the runqueue has completed"""
+    def __init__(self, cached, parsed, skipped, masked, virtuals, errors, total):
+        OperationCompleted.__init__(self, total, "Recipe parsing Completed")
+        self.cached = cached
+        self.parsed = parsed
+        self.skipped = skipped
+        self.virtuals = virtuals
+        self.masked = masked
+        self.errors = errors
+        self.sofar = cached + parsed
+
+class ParseProgress(OperationProgress):
+    """Recipe parsing progress"""
+    def __init__(self, current, total):
+        OperationProgress.__init__(self, current, total, "Recipe parsing")
+
+
+class CacheLoadStarted(OperationStarted):
+    """Loading of the dependency cache has begun"""
+    def __init__(self, total):
+        OperationStarted.__init__(self, "Loading cache Started")
+        self.total = total
+
+class CacheLoadProgress(OperationProgress):
+    """Cache loading progress"""
+    def __init__(self, current, total):
+        OperationProgress.__init__(self, current, total, "Loading cache")
+
+class CacheLoadCompleted(OperationCompleted):
+    """Cache loading is complete"""
+    def __init__(self, total, num_entries):
+        OperationCompleted.__init__(self, total, "Loading cache Completed")
+        self.num_entries = num_entries
+
+class TreeDataPreparationStarted(OperationStarted):
+    """Tree data preparation started"""
+    def __init__(self):
+        OperationStarted.__init__(self, "Preparing tree data Started")
+
+class TreeDataPreparationProgress(OperationProgress):
+    """Tree data preparation is in progress"""
+    def __init__(self, current, total):
+        OperationProgress.__init__(self, current, total, "Preparing tree data")
+
+class TreeDataPreparationCompleted(OperationCompleted):
+    """Tree data preparation completed"""
+    def __init__(self, total):
+        OperationCompleted.__init__(self, total, "Preparing tree data Completed")
+
+class DepTreeGenerated(Event):
+    """
+    Event when a dependency tree has been generated
+    """
+
+    def __init__(self, depgraph):
+        Event.__init__(self)
+        self._depgraph = depgraph
+
+class TargetsTreeGenerated(Event):
+    """
+    Event when a set of buildable targets has been generated
+    """
+    def __init__(self, model):
+        Event.__init__(self)
+        self._model = model
+
+class ReachableStamps(Event):
+    """
+    An event listing all stamps reachable after parsing
+    which the metadata may use to clean up stale data
+    """
+
+    def __init__(self, stamps):
+        Event.__init__(self)
+        self.stamps = stamps
+
+class FilesMatchingFound(Event):
+    """
+    Event when a list of files matching the supplied pattern has
+    been generated
+    """
+    def __init__(self, pattern, matches):
+        Event.__init__(self)
+        self._pattern = pattern
+        self._matches = matches
+
+class CoreBaseFilesFound(Event):
+    """
+    Event when a list of appropriate config files has been generated
+    """
+    def __init__(self, paths):
+        Event.__init__(self)
+        self._paths = paths
+
+class ConfigFilesFound(Event):
+    """
+    Event when a list of appropriate config files has been generated
+    """
+    def __init__(self, variable, values):
+        Event.__init__(self)
+        self._variable = variable
+        self._values = values
+
+class ConfigFilePathFound(Event):
+    """
+    Event when a path for a config file has been found
+    """
+    def __init__(self, path):
+        Event.__init__(self)
+        self._path = path
+
+class MsgBase(Event):
+    """Base class for messages"""
+
+    def __init__(self, msg):
+        self._message = msg
+        Event.__init__(self)
+
+class MsgDebug(MsgBase):
+    """Debug Message"""
+
+class MsgNote(MsgBase):
+    """Note Message"""
+
+class MsgWarn(MsgBase):
+    """Warning Message"""
+
+class MsgError(MsgBase):
+    """Error Message"""
+
+class MsgFatal(MsgBase):
+    """Fatal Message"""
+
+class MsgPlain(MsgBase):
+    """General output"""
+
+class LogExecTTY(Event):
+    """Send event containing program to spawn on tty of the logger"""
+    def __init__(self, msg, prog, sleep_delay, retries):
+        Event.__init__(self)
+        self.msg = msg
+        self.prog = prog
+        self.sleep_delay = sleep_delay
+        self.retries = retries
+
+class LogHandler(logging.Handler):
+    """Dispatch logging messages as bitbake events"""
+
+    def emit(self, record):
+        if record.exc_info:
+            etype, value, tb = record.exc_info
+            if hasattr(tb, 'tb_next'):
+                tb = list(bb.exceptions.extract_traceback(tb, context=3))
+            # Need to turn the value into something the logging system can pickle
+            value = str(value)
+            record.bb_exc_info = (etype, value, tb)
+            record.exc_info = None
+        fire(record, None)
+
+    def filter(self, record):
+        record.taskpid = worker_pid
+        return True
+
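[Editor's aside, not part of this patch: a small usage sketch of LogHandler above. The logger name is an assumption; any record emitted through such a logger is re-dispatched via fire() and so reaches the registered UI handlers.]

    import logging
    import bb.event

    # Attach the bitbake event bridge to an ordinary logger (name is made up).
    log = logging.getLogger("BitBake.Example")
    log.addHandler(bb.event.LogHandler())
    log.warning("this record is also fired as a bitbake event")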
+class RequestPackageInfo(Event):
+    """
+    Event to request package information
+    """
+
+class PackageInfo(Event):
+    """
+    Package information for GUI
+    """
+    def __init__(self, pkginfolist):
+        Event.__init__(self)
+        self._pkginfolist = pkginfolist
+
+class MetadataEvent(Event):
+    """
+    Generic event used by OE-Core classes
+    to report information during asynchronous execution
+    """
+    def __init__(self, eventtype, eventdata):
+        Event.__init__(self)
+        self.type = eventtype
+        self._localdata = eventdata
+
+class SanityCheck(Event):
+    """
+    Event to run sanity checks; it either raises errors or generates events to report status.
+    """
+    def __init__(self, generateevents = True):
+        Event.__init__(self)
+        self.generateevents = generateevents
+
+class SanityCheckPassed(Event):
+    """
+    Event to indicate sanity check has passed
+    """
+
+class SanityCheckFailed(Event):
+    """
+    Event to indicate sanity check has failed
+    """
+    def __init__(self, msg, network_error=False):
+        Event.__init__(self)
+        self._msg = msg
+        self._network_error = network_error
+
+class NetworkTest(Event):
+    """
+    Event to run network connectivity tests; it either raises errors or generates events to report status.
+    """
+    def __init__(self, generateevents = True):
+        Event.__init__(self)
+        self.generateevents = generateevents
+
+class NetworkTestPassed(Event):
+    """
+    Event to indicate network test has passed
+    """
+
+class NetworkTestFailed(Event):
+    """
+    Event to indicate network test has failed
+    """
+

+ 91 - 0
bitbake/lib/bb/exceptions.py

@@ -0,0 +1,91 @@
+from __future__ import absolute_import
+import inspect
+import traceback
+import bb.namedtuple_with_abc
+from collections import namedtuple
+
+
+class TracebackEntry(namedtuple.abc):
+    """Pickleable representation of a traceback entry"""
+    _fields = 'filename lineno function args code_context index'
+    _header = '  File "{0.filename}", line {0.lineno}, in {0.function}{0.args}'
+
+    def format(self, formatter=None):
+        if not self.code_context:
+            return self._header.format(self) + '\n'
+
+        formatted = [self._header.format(self) + ':\n']
+
+        for lineindex, line in enumerate(self.code_context):
+            if formatter:
+                line = formatter(line)
+
+            if lineindex == self.index:
+                formatted.append('    >%s' % line)
+            else:
+                formatted.append('     %s' % line)
+        return formatted
+
+    def __str__(self):
+        return ''.join(self.format())
+
+def _get_frame_args(frame):
+    """Get the formatted arguments and class (if available) for a frame"""
+    arginfo = inspect.getargvalues(frame)
+
+    try:
+        if not arginfo.args:
+            return '', None
+    # There have been reports from the field of Python 2.6 returning a plain
+    # tuple here rather than a namedtuple, so fall back gracefully if args
+    # isn't present.
+    except AttributeError:
+        return '', None
+
+    firstarg = arginfo.args[0]
+    if firstarg == 'self':
+        self = arginfo.locals['self']
+        cls = self.__class__.__name__
+
+        arginfo.args.pop(0)
+        del arginfo.locals['self']
+    else:
+        cls = None
+
+    formatted = inspect.formatargvalues(*arginfo)
+    return formatted, cls
+
+def extract_traceback(tb, context=1):
+    frames = inspect.getinnerframes(tb, context)
+    for frame, filename, lineno, function, code_context, index in frames:
+        formatted_args, cls = _get_frame_args(frame)
+        if cls:
+            function = '%s.%s' % (cls, function)
+        yield TracebackEntry(filename, lineno, function, formatted_args,
+                             code_context, index)
+
+def format_extracted(extracted, formatter=None, limit=None):
+    if limit:
+        extracted = extracted[-limit:]
+
+    formatted = []
+    for tracebackinfo in extracted:
+        formatted.extend(tracebackinfo.format(formatter))
+    return formatted
+
+
+def format_exception(etype, value, tb, context=1, limit=None, formatter=None):
+    formatted = ['Traceback (most recent call last):\n']
+
+    if hasattr(tb, 'tb_next'):
+        tb = extract_traceback(tb, context)
+
+    formatted.extend(format_extracted(tb, formatter, limit))
+    formatted.extend(traceback.format_exception_only(etype, value))
+    return formatted
+
+def to_string(exc):
+    if isinstance(exc, SystemExit):
+        if not isinstance(exc.code, basestring):
+            return 'Exited with "%d"' % exc.code
+    return str(exc)
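[Editor's aside, not part of this patch: a brief usage sketch of the helpers above, assuming a bitbake environment where bb.exceptions is importable.]

    import sys
    import bb.exceptions

    try:
        raise ValueError("example failure")
    except ValueError:
        etype, value, tb = sys.exc_info()
        # format_exception() yields a list of lines, much like the stdlib
        # traceback module, but with argument values in the frame headers.
        print(''.join(bb.exceptions.format_exception(etype, value, tb, context=2)))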

+ 1751 - 0
bitbake/lib/bb/fetch2/__init__.py

@@ -0,0 +1,1751 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementations
+
+Classes for obtaining upstream sources for the
+BitBake build tools.
+"""
+
+# Copyright (C) 2003, 2004  Chris Larson
+# Copyright (C) 2012  Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+from __future__ import absolute_import
+from __future__ import print_function
+import os, re
+import signal
+import logging
+import urllib
+import urlparse
+import bb.persist_data, bb.utils
+import bb.checksum
+from bb import data
+import bb.process
+import subprocess
+
+__version__ = "2"
+_checksum_cache = bb.checksum.FileChecksumCache()
+
+logger = logging.getLogger("BitBake.Fetcher")
+
+try:
+    import cPickle as pickle
+except ImportError:
+    import pickle
+    logger.info("Importing cPickle failed. "
+                "Falling back to a very slow implementation.")
+
+class BBFetchException(Exception):
+    """Class all fetch exceptions inherit from"""
+    def __init__(self, message):
+         self.msg = message
+         Exception.__init__(self, message)
+
+    def __str__(self):
+         return self.msg
+
+class UntrustedUrl(BBFetchException):
+    """Exception raised when encountering a host not listed in BB_ALLOWED_NETWORKS"""
+    def __init__(self, url, message=''):
+        if message:
+            msg = message
+        else:
+            msg = "The URL: '%s' is not trusted and cannot be used" % url
+        self.url = url
+        BBFetchException.__init__(self, msg)
+        self.args = (url,)
+
+class MalformedUrl(BBFetchException):
+    """Exception raised when encountering an invalid url"""
+    def __init__(self, url, message=''):
+         if message:
+             msg = message
+         else:
+             msg = "The URL: '%s' is invalid and cannot be interpreted" % url
+         self.url = url
+         BBFetchException.__init__(self, msg)
+         self.args = (url,)
+
+class FetchError(BBFetchException):
+    """General fetcher exception when something happens incorrectly"""
+    def __init__(self, message, url = None):
+         if url:
+            msg = "Fetcher failure for URL: '%s'. %s" % (url, message)
+         else:
+            msg = "Fetcher failure: %s" % message
+         self.url = url
+         BBFetchException.__init__(self, msg)
+         self.args = (message, url)
+
+class ChecksumError(FetchError):
+    """Exception when mismatched checksum encountered"""
+    def __init__(self, message, url = None, checksum = None):
+        self.checksum = checksum
+        FetchError.__init__(self, message, url)
+
+class NoChecksumError(FetchError):
+    """Exception when no checksum is specified, but BB_STRICT_CHECKSUM is set"""
+
+class UnpackError(BBFetchException):
+    """General fetcher exception when something happens incorrectly when unpacking"""
+    def __init__(self, message, url):
+         msg = "Unpack failure for URL: '%s'. %s" % (url, message)
+         self.url = url
+         BBFetchException.__init__(self, msg)
+         self.args = (message, url)
+
+class NoMethodError(BBFetchException):
+    """Exception raised when there is no method to obtain a supplied url or set of urls"""
+    def __init__(self, url):
+         msg = "Could not find a fetcher which supports the URL: '%s'" % url
+         self.url = url
+         BBFetchException.__init__(self, msg)
+         self.args = (url,)
+
+class MissingParameterError(BBFetchException):
+    """Exception raised when a fetch method is missing a critical parameter in the url"""
+    def __init__(self, missing, url):
+         msg = "URL: '%s' is missing the required parameter '%s'" % (url, missing)
+         self.url = url
+         self.missing = missing
+         BBFetchException.__init__(self, msg)
+         self.args = (missing, url)
+
+class ParameterError(BBFetchException):
+    """Exception raised when a url cannot be proccessed due to invalid parameters."""
+    def __init__(self, message, url):
+         msg = "URL: '%s' has invalid parameters. %s" % (url, message)
+         self.url = url
+         BBFetchException.__init__(self, msg)
+         self.args = (message, url)
+
+class NetworkAccess(BBFetchException):
+    """Exception raised when network access is disabled but it is required."""
+    def __init__(self, url, cmd):
+         msg = "Network access disabled through BB_NO_NETWORK (or set indirectly due to use of BB_FETCH_PREMIRRORONLY) but access requested with command %s (for url %s)" % (cmd, url)
+         self.url = url
+         self.cmd = cmd
+         BBFetchException.__init__(self, msg)
+         self.args = (url, cmd)
+
+class NonLocalMethod(Exception):
+    def __init__(self):
+        Exception.__init__(self)
+
+
+class URI(object):
+    """
+    A class representing a generic URI, with methods for
+    accessing the URI components, and stringifies to the
+    URI.
+
+    It is constructed by calling it with a URI, or setting
+    the attributes manually:
+
+     uri = URI("http://example.com/")
+
+     uri = URI()
+     uri.scheme = 'http'
+     uri.hostname = 'example.com'
+     uri.path = '/'
+
+    It has the following attributes:
+
+      * scheme (read/write)
+      * userinfo (authentication information) (read/write)
+        * username (read/write)
+        * password (read/write)
+
+        Note, password is deprecated as of RFC 3986.
+
+      * hostname (read/write)
+      * port (read/write)
+      * hostport (read only)
+        "hostname:port", if both are set, otherwise just "hostname"
+      * path (read/write)
+      * path_quoted (read/write)
+        A URI quoted version of path
+      * params (dict) (read/write)
+      * query (dict) (read/write)
+      * relative (bool) (read only)
+        True if this is a "relative URI" (e.g. file:foo.diff)
+
+    It stringifies to the URI itself.
+
+    Some notes about relative URIs: while it's specified that
+    a URI beginning with <scheme>:// should either be directly
+    followed by a hostname or a /, the old URI handling of the
+    fetch2 library did not conform to this. Therefore, this URI
+    class has some kludges to make sure that URIs are parsed in
+    a way conforming to bitbake's current usage. This URI class
+    supports the following:
+
+     file:relative/path.diff (IETF compliant)
+     git:relative/path.git (IETF compliant)
+     git:///absolute/path.git (IETF compliant)
+     file:///absolute/path.diff (IETF compliant)
+
+     file://relative/path.diff (not IETF compliant)
+
+    But it does not support the following:
+
+     file://hostname/absolute/path.diff (would be IETF compliant)
+
+    Note that the last case only applies to a list of
+    "whitelisted" schemes (currently only file://), that requires
+    its URIs to not have a network location.
+    """
+
+    _relative_schemes = ['file', 'git']
+    _netloc_forbidden = ['file']
+
+    def __init__(self, uri=None):
+        self.scheme = ''
+        self.userinfo = ''
+        self.hostname = ''
+        self.port = None
+        self._path = ''
+        self.params = {}
+        self.query = {}
+        self.relative = False
+
+        if not uri:
+            return
+
+        # We hijack the URL parameters, since the way bitbake uses
+        # them are not quite RFC compliant.
+        uri, param_str = (uri.split(";", 1) + [None])[:2]
+
+        urlp = urlparse.urlparse(uri)
+        self.scheme = urlp.scheme
+
+        reparse = 0
+
+        # Coerce urlparse to make URI scheme use netloc
+        if not self.scheme in urlparse.uses_netloc:
+            urlparse.uses_params.append(self.scheme)
+            reparse = 1
+
+        # Make urlparse happy(/ier) by converting local resources
+        # to RFC compliant URL format. E.g.:
+        #   file://foo.diff -> file:foo.diff
+        if urlp.scheme in self._netloc_forbidden:
+            uri = re.sub("(?<=:)//(?!/)", "", uri, 1)
+            reparse = 1
+
+        if reparse:
+            urlp = urlparse.urlparse(uri)
+
+        # Identify if the URI is relative or not
+        if urlp.scheme in self._relative_schemes and \
+           re.compile("^\w+:(?!//)").match(uri):
+            self.relative = True
+
+        if not self.relative:
+            self.hostname = urlp.hostname or ''
+            self.port = urlp.port
+
+            self.userinfo += urlp.username or ''
+
+            if urlp.password:
+                self.userinfo += ':%s' % urlp.password
+
+        self.path = urllib.unquote(urlp.path)
+
+        if param_str:
+            self.params = self._param_str_split(param_str, ";")
+        if urlp.query:
+            self.query = self._param_str_split(urlp.query, "&")
+
+    def __str__(self):
+        userinfo = self.userinfo
+        if userinfo:
+            userinfo += '@'
+
+        return "%s:%s%s%s%s%s%s" % (
+            self.scheme,
+            '' if self.relative else '//',
+            userinfo,
+            self.hostport,
+            self.path_quoted,
+            self._query_str(),
+            self._param_str())
+
+    def _param_str(self):
+        return (
+            ''.join([';', self._param_str_join(self.params, ";")])
+            if self.params else '')
+
+    def _query_str(self):
+        return (
+            ''.join(['?', self._param_str_join(self.query, "&")])
+            if self.query else '')
+
+    def _param_str_split(self, string, elmdelim, kvdelim="="):
+        ret = {}
+        for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim)]:
+            ret[k] = v
+        return ret
+
+    def _param_str_join(self, dict_, elmdelim, kvdelim="="):
+        return elmdelim.join([kvdelim.join([k, v]) for k, v in dict_.items()])
+
+    @property
+    def hostport(self):
+        if not self.port:
+            return self.hostname
+        return "%s:%d" % (self.hostname, self.port)
+
+    @property
+    def path_quoted(self):
+        return urllib.quote(self.path)
+
+    @path_quoted.setter
+    def path_quoted(self, path):
+        self.path = urllib.unquote(path)
+
+    @property
+    def path(self):
+        return self._path
+
+    @path.setter
+    def path(self, path):
+        self._path = path
+
+        if not path or re.compile("^/").match(path):
+            self.relative = False
+        else:
+            self.relative = True
+
+    @property
+    def username(self):
+        if self.userinfo:
+            return (self.userinfo.split(":", 1))[0]
+        return ''
+
+    @username.setter
+    def username(self, username):
+        password = self.password
+        self.userinfo = username
+        if password:
+            self.userinfo += ":%s" % password
+
+    @property
+    def password(self):
+        if self.userinfo and ":" in self.userinfo:
+            return (self.userinfo.split(":", 1))[1]
+        return ''
+
+    @password.setter
+    def password(self, password):
+        self.userinfo = "%s:%s" % (self.username, password)
+
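[Editor's aside, not part of this patch: a short round trip through the URI class above; the URL is made up.]

    from bb.fetch2 import URI

    uri = URI("http://example.com/foo.tar.gz;name=foo")
    print(uri.scheme)    # http
    print(uri.hostport)  # example.com
    print(uri.params)    # {'name': 'foo'}
    uri.port = 8080
    print(str(uri))      # http://example.com:8080/foo.tar.gz;name=foo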
+def decodeurl(url):
+    """Decodes an URL into the tokens (scheme, network location, path,
+    user, password, parameters).
+    """
+
+    m = re.compile('(?P<type>[^:]*)://((?P<user>[^/]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
+    if not m:
+        raise MalformedUrl(url)
+
+    type = m.group('type')
+    location = m.group('location')
+    if not location:
+        raise MalformedUrl(url)
+    user = m.group('user')
+    parm = m.group('parm')
+
+    locidx = location.find('/')
+    if locidx != -1 and type.lower() != 'file':
+        host = location[:locidx]
+        path = location[locidx:]
+    elif type.lower() == 'file':
+        host = ""
+        path = location
+    else:
+        host = location
+        path = ""
+    if user:
+        m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
+        if m:
+            user = m.group('user')
+            pswd = m.group('pswd')
+    else:
+        user = ''
+        pswd = ''
+
+    p = {}
+    if parm:
+        for s in parm.split(';'):
+            if s:
+                if not '=' in s:
+                    raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s))
+                s1, s2 = s.split('=', 1)
+                p[s1] = s2
+
+    return type, host, urllib.unquote(path), user, pswd, p
+
+def encodeurl(decoded):
+    """Encodes a URL from tokens (scheme, network location, path,
+    user, password, parameters).
+    """
+
+    type, host, path, user, pswd, p = decoded
+
+    if not path:
+        raise MissingParameterError('path', "encoded from the data %s" % str(decoded))
+    if not type:
+        raise MissingParameterError('type', "encoded from the data %s" % str(decoded))
+    url = '%s://' % type
+    if user and type != "file":
+        url += "%s" % user
+        if pswd:
+            url += ":%s" % pswd
+        url += "@"
+    if host and type != "file":
+        url += "%s" % host
+    # Standardise path to ensure comparisons work
+    while '//' in path:
+        path = path.replace("//", "/")
+    url += "%s" % urllib.quote(path)
+    if p:
+        for parm in p:
+            url += ";%s=%s" % (parm, p[parm])
+
+    return url
+
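[Editor's aside, not part of this patch: a quick illustration of the decode/encode round trip; the URL is made up.]

    from bb.fetch2 import decodeurl, encodeurl

    tokens = decodeurl("git://git.example.org/repo.git;protocol=https;branch=master")
    # tokens == (scheme, host, path, user, password, params-dict)
    print(tokens)
    print(encodeurl(tokens))  # parameter order may differ from the input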
+def uri_replace(ud, uri_find, uri_replace, replacements, d):
+    if not ud.url or not uri_find or not uri_replace:
+        logger.error("uri_replace: passed an undefined value, not replacing")
+        return None
+    uri_decoded = list(decodeurl(ud.url))
+    uri_find_decoded = list(decodeurl(uri_find))
+    uri_replace_decoded = list(decodeurl(uri_replace))
+    logger.debug(2, "For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
+    result_decoded = ['', '', '', '', '', {}]
+    for loc, i in enumerate(uri_find_decoded):
+        result_decoded[loc] = uri_decoded[loc]
+        regexp = i
+        if loc == 0 and regexp and not regexp.endswith("$"):
+            # Leaving the type unanchored can mean "https" matching "file" can become "files"
+            # which is clearly undesirable.
+            regexp += "$"
+        if loc == 5:
+            # Handle URL parameters
+            if i:
+                # Any specified URL parameters must match
+                for k in uri_replace_decoded[loc]:
+                    if uri_decoded[loc][k] != uri_replace_decoded[loc][k]:
+                        return None
+            # Overwrite any specified replacement parameters
+            for k in uri_replace_decoded[loc]:
+                for l in replacements:
+                    uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l])
+                result_decoded[loc][k] = uri_replace_decoded[loc][k]
+        elif (re.match(regexp, uri_decoded[loc])):
+            if not uri_replace_decoded[loc]:
+                result_decoded[loc] = ""    
+            else:
+                for k in replacements:
+                    uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k])
+                #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc]))
+                result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc], 1)
+            if loc == 2:
+                # Handle path manipulations
+                basename = None
+                if uri_decoded[0] != uri_replace_decoded[0] and ud.mirrortarball:
+                    # If the source and destination url types differ, must be a mirrortarball mapping
+                    basename = os.path.basename(ud.mirrortarball)
+                    # Kill parameters, they make no sense for mirror tarballs
+                    uri_decoded[5] = {}
+                elif ud.localpath and ud.method.supports_checksum(ud):
+                    basename = os.path.basename(ud.localpath)
+                if basename and not result_decoded[loc].endswith(basename):
+                    result_decoded[loc] = os.path.join(result_decoded[loc], basename)
+        else:
+            return None
+    result = encodeurl(result_decoded)
+    if result == ud.url:
+        return None
+    logger.debug(2, "For url %s returning %s" % (ud.url, result))
+    return result
+
+methods = []
+urldata_cache = {}
+saved_headrevs = {}
+
+def fetcher_init(d):
+    """
+    Called to initialize the fetchers once the configuration data is known.
+    Calls before this must not hit the cache.
+    """
+    # When to drop SCM head revisions controlled by user policy
+    srcrev_policy = d.getVar('BB_SRCREV_POLICY', True) or "clear"
+    if srcrev_policy == "cache":
+        logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
+    elif srcrev_policy == "clear":
+        logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
+        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
+        try:
+            bb.fetch2.saved_headrevs = revs.items()
+        except:
+            pass
+        revs.clear()
+    else:
+        raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
+
+    _checksum_cache.init_cache(d)
+
+    for m in methods:
+        if hasattr(m, "init"):
+            m.init(d)
+
+def fetcher_parse_save():
+    _checksum_cache.save_extras()
+
+def fetcher_parse_done():
+    _checksum_cache.save_merge()
+
+def fetcher_compare_revisions(d):
+    """
+    Compare the revisions in the persistent cache with current values and
+    return true/false on whether they've changed.
+    """
+
+    data = dict(bb.persist_data.persist('BB_URI_HEADREVS', d).items())
+    data2 = dict(bb.fetch2.saved_headrevs)
+
+    changed = False
+    for key in data:
+        if key not in data2 or data2[key] != data[key]:
+            logger.debug(1, "%s changed", key)
+            changed = True
+            return True
+        else:
+            logger.debug(2, "%s did not change", key)
+    return False
+
+def mirror_from_string(data):
+    return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ]
+
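[Editor's aside, not part of this patch: mirror_from_string() above splits a MIRRORS/PREMIRRORS-style value into (find, replace) pairs; the mirror URLs below are made up.]

    from bb.fetch2 import mirror_from_string

    mirrors = mirror_from_string(
        "git://.*/.* http://downloads.example.com/mirror/ \\n "
        "ftp://.*/.* http://downloads.example.com/mirror/")
    print(mirrors)
    # [['git://.*/.*', 'http://downloads.example.com/mirror/'],
    #  ['ftp://.*/.*', 'http://downloads.example.com/mirror/']]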
+def verify_checksum(ud, d, precomputed={}):
+    """
+    verify the MD5 and SHA256 checksum for downloaded src
+
+    Raises a FetchError if one or both of the SRC_URI checksums do not match
+    the downloaded file, or if BB_STRICT_CHECKSUM is set and there are no
+    checksums specified.
+
+    Returns a dict of checksums that can be stored in a done stamp file and
+    passed in as precomputed parameter in a later call to avoid re-computing
+    the checksums from the file. This allows verifying the checksums of the
+    file against those in the recipe each time, rather than only after
+    downloading. See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571.
+    """
+
+    _MD5_KEY = "md5"
+    _SHA256_KEY = "sha256"
+
+    if ud.ignore_checksums or not ud.method.supports_checksum(ud):
+        return {}
+
+    if _MD5_KEY in precomputed:
+        md5data = precomputed[_MD5_KEY]
+    else:
+        md5data = bb.utils.md5_file(ud.localpath)
+
+    if _SHA256_KEY in precomputed:
+        sha256data = precomputed[_SHA256_KEY]
+    else:
+        sha256data = bb.utils.sha256_file(ud.localpath)
+
+    if ud.method.recommends_checksum(ud) and not ud.md5_expected and not ud.sha256_expected:
+        # If strict checking enabled and neither sum defined, raise error
+        strict = d.getVar("BB_STRICT_CHECKSUM", True) or "0"
+        if strict == "1":
+            logger.error('No checksum specified for %s, please add at least one to the recipe:\n'
+                             'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
+                             (ud.localpath, ud.md5_name, md5data,
+                              ud.sha256_name, sha256data))
+            raise NoChecksumError('Missing SRC_URI checksum', ud.url)
+
+        # Log missing sums so user can more easily add them
+        logger.warn('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n'
+                    'SRC_URI[%s] = "%s"',
+                    ud.localpath, ud.md5_name, md5data)
+        logger.warn('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n'
+                    'SRC_URI[%s] = "%s"',
+                    ud.localpath, ud.sha256_name, sha256data)
+
+    # We want to alert the user if a checksum is defined in the recipe but
+    # it does not match.
+    msg = ""
+    mismatch = False
+    if ud.md5_expected and ud.md5_expected != md5data:
+        msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'md5', md5data, ud.md5_expected)
+        mismatch = True
+
+    if ud.sha256_expected and ud.sha256_expected != sha256data:
+        msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'sha256', sha256data, ud.sha256_expected)
+        mismatch = True
+
+    if mismatch:
+        msg = msg + '\nIf this change is expected (e.g. you have upgraded to a new version without updating the checksums) then you can use these lines within the recipe:\nSRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"\nOtherwise you should retry the download and/or check with upstream to determine if the file has become corrupted or otherwise unexpectedly modified.\n' % (ud.md5_name, md5data, ud.sha256_name, sha256data)
+
+    if len(msg):
+        raise ChecksumError('Checksum mismatch!%s' % msg, ud.url, md5data)
+
+    return {
+        _MD5_KEY: md5data,
+        _SHA256_KEY: sha256data
+    }
+
+
+def verify_donestamp(ud, d, origud=None):
+    """
+    Check whether the done stamp file has the right checksums (if the fetch
+    method supports them). If it doesn't, delete the done stamp and force
+    a re-download.
+
+    Returns True, if the donestamp exists and is valid, False otherwise. When
+    returning False, any existing done stamps are removed.
+    """
+    if not ud.needdonestamp:
+        return True
+
+    if not os.path.exists(ud.donestamp):
+        return False
+
+    if (not ud.method.supports_checksum(ud) or
+        (origud and not origud.method.supports_checksum(origud))):
+        # done stamp exists, checksums not supported; assume the local file is
+        # current
+        return True
+
+    if not os.path.exists(ud.localpath):
+        # done stamp exists, but the downloaded file does not; the done stamp
+        # must be incorrect, re-trigger the download
+        bb.utils.remove(ud.donestamp)
+        return False
+
+    precomputed_checksums = {}
+    # Only re-use the precomputed checksums if the donestamp is newer than the
+    # file. Do not rely on the mtime of directories, though. If ud.localpath is
+    # a directory, there will probably not be any checksums anyway.
+    if (os.path.isdir(ud.localpath) or
+            os.path.getmtime(ud.localpath) < os.path.getmtime(ud.donestamp)):
+        try:
+            with open(ud.donestamp, "rb") as cachefile:
+                pickled = pickle.Unpickler(cachefile)
+                precomputed_checksums.update(pickled.load())
+        except Exception as e:
+            # Avoid the warnings on the upgrade path from empty done stamp
+            # files to those containing the checksums.
+            if not isinstance(e, EOFError):
+                # Ignore errors, they aren't fatal
+                logger.warn("Couldn't load checksums from donestamp %s: %s "
+                            "(msg: %s)" % (ud.donestamp, type(e).__name__,
+                                           str(e)))
+
+    try:
+        checksums = verify_checksum(ud, d, precomputed_checksums)
+        # If the cache file did not have the checksums, compute and store them
+        # as an upgrade path from the previous done stamp file format.
+        if checksums != precomputed_checksums:
+            with open(ud.donestamp, "wb") as cachefile:
+                p = pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL)
+                p.dump(checksums)
+        return True
+    except ChecksumError as e:
+        # Checksums failed to verify, trigger re-download and remove the
+        # incorrect stamp file.
+        logger.warn("Checksum mismatch for local file %s\n"
+                    "Cleaning and trying again." % ud.localpath)
+        rename_bad_checksum(ud, e.checksum)
+        bb.utils.remove(ud.donestamp)
+    return False
+
+
+def update_stamp(ud, d):
+    """
+        donestamp is file stamp indicating the whole fetching is done
+        this function update the stamp after verifying the checksum
+    """
+    if not ud.needdonestamp:
+        return
+
+    if os.path.exists(ud.donestamp):
+        # Touch the done stamp file to show active use of the download
+        try:
+            os.utime(ud.donestamp, None)
+        except:
+            # Errors aren't fatal here
+            pass
+    else:
+        checksums = verify_checksum(ud, d)
+        # Store the checksums for later re-verification against the recipe
+        with open(ud.donestamp, "wb") as cachefile:
+            p = pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL)
+            p.dump(checksums)
+
+def subprocess_setup():
+    # Python installs a SIGPIPE handler by default. This is usually not what
+    # non-Python subprocesses expect.
+    # SIGPIPE errors are known issues with gzip/bash
+    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
+
+def get_autorev(d):
+    # When SRCREV is AUTOINC, don't cache the parsed recipe unless the SRCREV policy is "cache"
+    if d.getVar('BB_SRCREV_POLICY', True) != "cache":
+        d.setVar('__BB_DONT_CACHE', '1')
+    return "AUTOINC"
+
+def get_srcrev(d, method_name='sortable_revision'):
+    """
+    Return the revision string, usually for use in the version string (PV) of the current package
+    Most packages usually only have one SCM so we just pass on the call.
+    In the multi SCM case, we build a value based on SRCREV_FORMAT which must
+    have been set.
+
+    The idea here is that we put the string "AUTOINC+" into return value if the revisions are not 
+    incremental, other code is then responsible for turning that into an increasing value (if needed)
+
+    A method_name can be supplied to retrieve an alternatively formatted revision from a fetcher, if
+    that fetcher provides a method with the given name and the same signature as sortable_revision.
+    """
+
+    scms = []
+    fetcher = Fetch(d.getVar('SRC_URI', True).split(), d)
+    urldata = fetcher.ud
+    for u in urldata:
+        if urldata[u].method.supports_srcrev():
+            scms.append(u)
+
+    if len(scms) == 0:
+        raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")
+
+    if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
+        autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].names[0])
+        if len(rev) > 10:
+            rev = rev[:10]
+        if autoinc:
+            return "AUTOINC+" + rev
+        return rev
+
+    #
+    # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
+    #
+    format = d.getVar('SRCREV_FORMAT', True)
+    if not format:
+        raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
+
+    seenautoinc = False
+    for scm in scms:
+        ud = urldata[scm]
+        for name in ud.names:
+            autoinc, rev = getattr(ud.method, method_name)(ud, d, name)
+            seenautoinc = seenautoinc or autoinc
+            if len(rev) > 10:
+                rev = rev[:10]
+            format = format.replace(name, rev)
+    if seenautoinc:
+       format = "AUTOINC+" + format
+
+    return format
+
+def localpath(url, d):
+    fetcher = bb.fetch2.Fetch([url], d)
+    return fetcher.localpath(url)
+
+def runfetchcmd(cmd, d, quiet=False, cleanup=None):
+    """
+    Run cmd returning the command output
+    Raise an error if interrupted or cmd fails
+    Optionally echo command output to stdout
+    Optionally remove the files/directories listed in cleanup upon failure
+    """
+
+    # Need to export PATH as binary could be in metadata paths
+    # rather than host provided
+    # Also include some other variables.
+    # FIXME: Should really include all exported variables?
+    exportvars = ['HOME', 'PATH',
+                  'HTTP_PROXY', 'http_proxy',
+                  'HTTPS_PROXY', 'https_proxy',
+                  'FTP_PROXY', 'ftp_proxy',
+                  'FTPS_PROXY', 'ftps_proxy',
+                  'NO_PROXY', 'no_proxy',
+                  'ALL_PROXY', 'all_proxy',
+                  'GIT_PROXY_COMMAND',
+                  'GIT_SSL_CAINFO',
+                  'GIT_SMART_HTTP',
+                  'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
+                  'SOCKS5_USER', 'SOCKS5_PASSWD']
+
+    if not cleanup:
+        cleanup = []
+
+    for var in exportvars:
+        val = d.getVar(var, True)
+        if val:
+            cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
+
+    logger.debug(1, "Running %s", cmd)
+
+    success = False
+    error_message = ""
+
+    try:
+        (output, errors) = bb.process.run(cmd, shell=True, stderr=subprocess.PIPE)
+        success = True
+    except bb.process.NotFoundError as e:
+        error_message = "Fetch command %s" % (e.command)
+    except bb.process.ExecutionError as e:
+        if e.stdout:
+            output = "output:\n%s\n%s" % (e.stdout, e.stderr)
+        elif e.stderr:
+            output = "output:\n%s" % e.stderr
+        else:
+            output = "no output"
+        error_message = "Fetch command failed with exit code %s, %s" % (e.exitcode, output)
+    except bb.process.CmdError as e:
+        error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg)
+    if not success:
+        for f in cleanup:
+            try:
+                bb.utils.remove(f, True)
+            except OSError:
+                pass
+
+        raise FetchError(error_message)
+
+    return output
+
+def check_network_access(d, info = "", url = None):
+    """
+    log remote network access, and raise an error if BB_NO_NETWORK is set
+    """
+    if d.getVar("BB_NO_NETWORK", True) == "1":
+        raise NetworkAccess(url, info)
+    else:
+        logger.debug(1, "Fetcher accessed the network with the command %s" % info)
+
+def build_mirroruris(origud, mirrors, ld):
+    uris = []
+    uds = []
+
+    replacements = {}
+    replacements["TYPE"] = origud.type
+    replacements["HOST"] = origud.host
+    replacements["PATH"] = origud.path
+    replacements["BASENAME"] = origud.path.split("/")[-1]
+    replacements["MIRRORNAME"] = origud.host.replace(':','.') + origud.path.replace('/', '.').replace('*', '.')
+
+    def adduri(ud, uris, uds, mirrors):
+        for line in mirrors:
+            try:
+                (find, replace) = line
+            except ValueError:
+                continue
+            newuri = uri_replace(ud, find, replace, replacements, ld)
+            if not newuri or newuri in uris or newuri == origud.url:
+                continue
+
+            if not trusted_network(ld, newuri):
+                logger.debug(1, "Mirror %s not in the list of trusted networks, skipping" %  (newuri))
+                continue
+
+            # Create a local copy of the mirrors minus the current line
+            # this will prevent us from recursively processing the same line
+            # as well as indirect recursion A -> B -> C -> A
+            localmirrors = list(mirrors)
+            localmirrors.remove(line)
+
+            try:
+                newud = FetchData(newuri, ld)
+                newud.setup_localpath(ld)
+            except bb.fetch2.BBFetchException as e:
+                logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
+                logger.debug(1, str(e))
+                try:
+                    # setup_localpath of file:// urls may fail, we should still see 
+                    # if mirrors of the url exist
+                    adduri(newud, uris, uds, localmirrors)
+                except UnboundLocalError:
+                    pass
+                continue   
+            uris.append(newuri)
+            uds.append(newud)
+
+            adduri(newud, uris, uds, localmirrors)
+
+    adduri(origud, uris, uds, mirrors)
+
+    return uris, uds
+
+def rename_bad_checksum(ud, suffix):
+    """
+    Rename the local file so that its name carries the given suffix
+    """
+
+    if ud.localpath is None:
+        return
+
+    new_localpath = "%s_bad-checksum_%s" % (ud.localpath, suffix)
+    bb.warn("Renaming %s to %s" % (ud.localpath, new_localpath))
+    bb.utils.movefile(ud.localpath, new_localpath)
+
+
+def try_mirror_url(fetch, origud, ud, ld, check = False):
+    # Return of None or a value means we're finished
+    # False means try another url
+    try:
+        if check:
+            found = ud.method.checkstatus(fetch, ud, ld)
+            if found:
+                return found
+            return False
+
+        os.chdir(ld.getVar("DL_DIR", True))
+
+        if not verify_donestamp(ud, ld, origud) or ud.method.need_update(ud, ld):
+            ud.method.download(ud, ld)
+            if hasattr(ud.method,"build_mirror_data"):
+                ud.method.build_mirror_data(ud, ld)
+
+        if not ud.localpath or not os.path.exists(ud.localpath):
+            return False
+
+        if ud.localpath == origud.localpath:
+            return ud.localpath
+
+        # We may be obtaining a mirror tarball which needs further processing by the real fetcher
+        # If that tarball is a local file:// we need to provide a symlink to it
+        dldir = ld.getVar("DL_DIR", True)
+        if origud.mirrortarball and os.path.basename(ud.localpath) == os.path.basename(origud.mirrortarball) \
+                and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
+            # Create donestamp in old format to avoid triggering a re-download
+            if ud.donestamp:
+                bb.utils.mkdirhier(os.path.dirname(ud.donestamp))
+                open(ud.donestamp, 'w').close()
+            dest = os.path.join(dldir, os.path.basename(ud.localpath))
+            if not os.path.exists(dest):
+                os.symlink(ud.localpath, dest)
+            if not verify_donestamp(origud, ld) or origud.method.need_update(origud, ld):
+                origud.method.download(origud, ld)
+                if hasattr(origud.method,"build_mirror_data"):
+                    origud.method.build_mirror_data(origud, ld)
+            return origud.localpath
+        # Otherwise the result is a local file:// and we symlink to it
+        if not os.path.exists(origud.localpath):
+            if os.path.islink(origud.localpath):
+                # Broken symbolic link
+                os.unlink(origud.localpath)
+
+            os.symlink(ud.localpath, origud.localpath)
+        update_stamp(origud, ld)
+        return ud.localpath
+
+    except bb.fetch2.NetworkAccess:
+        raise
+
+    except bb.fetch2.BBFetchException as e:
+        if isinstance(e, ChecksumError):
+            logger.warn("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (ud.url, origud.url))
+            logger.warn(str(e))
+            rename_bad_checksum(ud, e.checksum)
+        elif isinstance(e, NoChecksumError):
+            raise
+        else:
+            logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
+            logger.debug(1, str(e))
+        try:
+            ud.method.clean(ud, ld)
+        except UnboundLocalError:
+            pass
+        return False
+
+def try_mirrors(fetch, d, origud, mirrors, check = False):
+    """
+    Try to use a mirrored version of the sources.
+    This method will be automatically called before the fetchers go.
+
+    d is a bb.data instance
+    origud is the original FetchData for the uri we're trying to download
+    mirrors is the list of mirrors we're going to try
+    """
+    ld = d.createCopy()
+
+    uris, uds = build_mirroruris(origud, mirrors, ld)
+
+    for index, uri in enumerate(uris):
+        ret = try_mirror_url(fetch, origud, uds[index], ld, check)
+        if ret != False:
+            return ret
+    return None
+
+def trusted_network(d, url):
+    """
+    Check whether the host of the given url is listed in BB_ALLOWED_NETWORKS,
+    which may be set globally or for a specific recipe.
+    Returns True if the url is trusted or no restrictions apply, False otherwise.
+    """
+    if d.getVar('BB_NO_NETWORK', True) == "1":
+        return True
+
+    pkgname = d.expand(d.getVar('PN', False))
+    trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False)
+
+    if not trusted_hosts:
+        trusted_hosts = d.getVar('BB_ALLOWED_NETWORKS', True)
+
+    # Not enabled.
+    if not trusted_hosts:
+        return True
+
+    scheme, network, path, user, passwd, param = decodeurl(url)
+
+    if not network:
+        return True
+
+    network = network.lower()
+
+    for host in trusted_hosts.split(" "):
+        host = host.lower()
+        if host.startswith("*.") and ("." + network).endswith(host[1:]):
+            return True
+        if host == network:
+            return True
+
+    return False
+
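[Editor's aside, not part of this patch: a sketch of the BB_ALLOWED_NETWORKS matching implemented by trusted_network() above. The datastore contents and URLs are assumptions, not shipped defaults.]

    import bb.data
    from bb.fetch2 import trusted_network

    d = bb.data.init()
    d.setVar('PN', 'example-recipe')
    d.setVar('BB_ALLOWED_NETWORKS', '*.example.org downloads.example.com')
    # "*." entries match the named host itself and any of its subdomains.
    print(trusted_network(d, 'git://git.example.org/repo.git;protocol=https'))  # True
    print(trusted_network(d, 'http://untrusted.net/file.tar.gz'))               # False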
+def srcrev_internal_helper(ud, d, name):
+    """
+    Return:
+        a) a source revision if specified
+        b) latest revision if SRCREV="AUTOINC"
+        c) None if not specified
+    """
+
+    srcrev = None
+    pn = d.getVar("PN", True)
+    attempts = []
+    if name != '' and pn:
+        attempts.append("SRCREV_%s_pn-%s" % (name, pn))
+    if name != '':
+        attempts.append("SRCREV_%s" % name)
+    if pn:
+        attempts.append("SRCREV_pn-%s" % pn)
+    attempts.append("SRCREV")
+
+    for a in attempts:
+        srcrev = d.getVar(a, True)              
+        if srcrev and srcrev != "INVALID":
+            break
+
+    if 'rev' in ud.parm and 'tag' in ud.parm:
+        raise FetchError("Please specify a ;rev= parameter or a ;tag= parameter in the url %s but not both." % (ud.url))
+
+    if 'rev' in ud.parm or 'tag' in ud.parm:
+        if 'rev' in ud.parm:
+            parmrev = ud.parm['rev']
+        else:
+            parmrev = ud.parm['tag']
+        if srcrev == "INVALID" or not srcrev:
+            return parmrev
+        if srcrev != parmrev:
+            raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please spcify one valid value" % (srcrev, parmrev))
+        return parmrev
+
+    if srcrev == "INVALID" or not srcrev:
+        raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url)
+    if srcrev == "AUTOINC":
+        srcrev = ud.method.latest_revision(ud, d, name)
+
+    return srcrev
+
+def get_checksum_file_list(d):
+    """ Get a list of files checksum in SRC_URI
+
+    Returns the resolved local paths of all local file entries in
+    SRC_URI as a space-separated string
+    """
+    fetch = Fetch([], d, cache = False, localonly = True)
+
+    dl_dir = d.getVar('DL_DIR', True)
+    filelist = []
+    for u in fetch.urls:
+        ud = fetch.ud[u]
+
+        if ud and isinstance(ud.method, local.Local):
+            paths = ud.method.localpaths(ud, d)
+            for f in paths:
+                pth = ud.decodedurl
+                if '*' in pth:
+                    f = os.path.join(os.path.abspath(f), pth)
+                if f.startswith(dl_dir):
+                    # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
+                    if os.path.exists(f):
+                        bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN', True), os.path.basename(f)))
+                    else:
+                        bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN', True), os.path.basename(f)))
+                filelist.append(f + ":" + str(os.path.exists(f)))
+
+    return " ".join(filelist)
+
+def get_file_checksums(filelist, pn):
+    """Get a list of the checksums for a list of local files
+
+    Returns the checksums for a list of local files, caching the results as
+    it proceeds
+
+    """
+    return _checksum_cache.get_checksums(filelist, pn)
+
+
+class FetchData(object):
+    """
+    A class which represents the fetcher state for a given URI.
+    """
+    def __init__(self, url, d, localonly = False):
+        # localpath is the location of a downloaded result. If not set, the file is local.
+        self.donestamp = None
+        self.needdonestamp = True
+        self.localfile = ""
+        self.localpath = None
+        self.lockfile = None
+        self.mirrortarball = None
+        self.basename = None
+        self.basepath = None
+        (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d))
+        self.date = self.getSRCDate(d)
+        self.url = url
+        if not self.user and "user" in self.parm:
+            self.user = self.parm["user"]
+        if not self.pswd and "pswd" in self.parm:
+            self.pswd = self.parm["pswd"]
+        self.setup = False
+
+        if "name" in self.parm:
+            self.md5_name = "%s.md5sum" % self.parm["name"]
+            self.sha256_name = "%s.sha256sum" % self.parm["name"]
+        else:
+            self.md5_name = "md5sum"
+            self.sha256_name = "sha256sum"
+        if self.md5_name in self.parm:
+            self.md5_expected = self.parm[self.md5_name]
+        elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]:
+            self.md5_expected = None
+        else:
+            self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name, True)
+        if self.sha256_name in self.parm:
+            self.sha256_expected = self.parm[self.sha256_name]
+        elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]:
+            self.sha256_expected = None
+        else:
+            self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name, True)
+        self.ignore_checksums = False
+
+        self.names = self.parm.get("name",'default').split(',')
+
+        self.method = None
+        for m in methods:
+            if m.supports(self, d):
+                self.method = m
+                break                
+
+        if not self.method:
+            raise NoMethodError(url)
+
+        if localonly and not isinstance(self.method, local.Local):
+            raise NonLocalMethod()
+
+        if self.parm.get("proto", None) and "protocol" not in self.parm:
+            logger.warn('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN', True))
+            self.parm["protocol"] = self.parm.get("proto", None)
+
+        if hasattr(self.method, "urldata_init"):
+            self.method.urldata_init(self, d)
+
+        if "localpath" in self.parm:
+            # if user sets localpath for file, use it instead.
+            self.localpath = self.parm["localpath"]
+            self.basename = os.path.basename(self.localpath)
+        elif self.localfile:
+            self.localpath = self.method.localpath(self, d)
+
+        dldir = d.getVar("DL_DIR", True)
+
+        if not self.needdonestamp:
+            return
+
+        # Note: .done and .lock files should always be in DL_DIR whereas localpath may not be.
+        if self.localpath and self.localpath.startswith(dldir):
+            basepath = self.localpath
+        elif self.localpath:
+            basepath = dldir + os.sep + os.path.basename(self.localpath)
+        elif self.basepath or self.basename:
+            basepath = dldir + os.sep + (self.basepath or self.basename)
+        else:
+            bb.fatal("Can't determine lock path for url %s" % url)
+
+        self.donestamp = basepath + '.done'
+        self.lockfile = basepath + '.lock'
+
+    def setup_revisons(self, d):
+        self.revisions = {}
+        for name in self.names:
+            self.revisions[name] = srcrev_internal_helper(self, d, name)
+
+        # add compatibility code for non name specified case
+        if len(self.names) == 1:
+            self.revision = self.revisions[self.names[0]]
+
+    def setup_localpath(self, d):
+        if not self.localpath:
+            self.localpath = self.method.localpath(self, d)
+
+    def getSRCDate(self, d):
+        """
+        Return the SRC Date for the component
+
+        d: the data store
+        """
+        if "srcdate" in self.parm:
+            return self.parm['srcdate']
+
+        pn = d.getVar("PN", True)
+
+        if pn:
+            return d.getVar("SRCDATE_%s" % pn, True) or d.getVar("SRCDATE", True) or d.getVar("DATE", True)
+
+        return d.getVar("SRCDATE", True) or d.getVar("DATE", True)
+
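The .done/.lock derivation at the end of FetchData.__init__() always places stamp files under DL_DIR, even when localpath lives elsewhere. A minimal standalone sketch of those rules (illustrative only; the paths are made up):

    import os

    def stamp_paths(dldir, localpath=None, basepath=None, basename=None):
        # Mirror the basepath selection rules used for .done/.lock files
        if localpath and localpath.startswith(dldir):
            base = localpath
        elif localpath:
            base = dldir + os.sep + os.path.basename(localpath)
        elif basepath or basename:
            base = dldir + os.sep + (basepath or basename)
        else:
            raise ValueError("cannot determine lock path")
        return base + '.done', base + '.lock'

    print(stamp_paths("/downloads", localpath="/some/layer/files/foo.patch"))
    # -> ('/downloads/foo.patch.done', '/downloads/foo.patch.lock')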
+class FetchMethod(object):
+    """Base class for 'fetch'ing data"""
+
+    def __init__(self, urls=None):
+        self.urls = []
+
+    def supports(self, urldata, d):
+        """
+        Check to see if this fetch class supports a given url.
+        """
+        return 0
+
+    def localpath(self, urldata, d):
+        """
+        Return the local filename of a given url assuming a successful fetch.
+        Can also set up variables in urldata for use in download() (saving code
+        duplication and duplicate code execution)
+        """
+        return os.path.join(data.getVar("DL_DIR", d, True), urldata.localfile)
+
+    def supports_checksum(self, urldata):
+        """
+        Is localpath something that can be represented by a checksum?
+        """
+
+        # We cannot compute checksums for directories
+        if os.path.isdir(urldata.localpath):
+            return False
+        if urldata.localpath.find("*") != -1:
+            return False
+
+        return True
+
+    def recommends_checksum(self, urldata):
+        """
+        Does this backend recommend a checksum (i.e. should warnings be
+        displayed if no checksum is provided)?
+        """
+        return False
+
+    def _strip_leading_slashes(self, relpath):
+        """
+        Remove leading slash as os.path.join can't cope
+        """
+        while os.path.isabs(relpath):
+            relpath = relpath[1:]
+        return relpath
+
+    def setUrls(self, urls):
+        self.__urls = urls
+
+    def getUrls(self):
+        return self.__urls
+
+    urls = property(getUrls, setUrls, None, "Urls property")
+
+    def need_update(self, ud, d):
+        """
+        Force a fetch, even if localpath exists?
+        """
+        if os.path.exists(ud.localpath):
+            return False
+        return True
+
+    def supports_srcrev(self):
+        """
+        The fetcher supports auto source revisions (SRCREV)
+        """
+        return False
+
+    def download(self, urldata, d):
+        """
+        Fetch urls
+        Assumes localpath was called first
+        """
+        raise NoMethodError(urldata.url)
+
+    def unpack(self, urldata, rootdir, data):
+        iterate = False
+        file = urldata.localpath
+
+        # The local fetcher can't deal with 'dir/*' entries, so it converts them
+        # to '.'; that must be corrected back before copying local files
+        if urldata.basename == '*' and file.endswith('/.'):
+            file = '%s/%s' % (file.rstrip('/.'), urldata.path)
+
+        try:
+            unpack = bb.utils.to_boolean(urldata.parm.get('unpack'), True)
+        except ValueError as exc:
+            bb.fatal("Invalid value for 'unpack' parameter for %s: %s" %
+                     (file, urldata.parm.get('unpack')))
+
+        base, ext = os.path.splitext(file)
+        if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz']:
+            efile = os.path.join(rootdir, os.path.basename(base))
+        else:
+            efile = file
+        cmd = None
+
+        if unpack:
+            if file.endswith('.tar'):
+                cmd = 'tar x --no-same-owner -f %s' % file
+            elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
+                cmd = 'tar xz --no-same-owner -f %s' % file
+            elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
+                cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
+            elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
+                cmd = 'gzip -dc %s > %s' % (file, efile)
+            elif file.endswith('.bz2'):
+                cmd = 'bzip2 -dc %s > %s' % (file, efile)
+            elif file.endswith('.tar.xz'):
+                cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
+            elif file.endswith('.xz'):
+                cmd = 'xz -dc %s > %s' % (file, efile)
+            elif file.endswith('.tar.lz'):
+                cmd = 'lzip -dc %s | tar x --no-same-owner -f -' % file
+            elif file.endswith('.lz'):
+                cmd = 'lzip -dc %s > %s' % (file, efile)
+            elif file.endswith('.zip') or file.endswith('.jar'):
+                try:
+                    dos = bb.utils.to_boolean(urldata.parm.get('dos'), False)
+                except ValueError as exc:
+                    bb.fatal("Invalid value for 'dos' parameter for %s: %s" %
+                             (file, urldata.parm.get('dos')))
+                cmd = 'unzip -q -o'
+                if dos:
+                    cmd = '%s -a' % cmd
+                cmd = "%s '%s'" % (cmd, file)
+            elif file.endswith('.rpm') or file.endswith('.srpm'):
+                if 'extract' in urldata.parm:
+                    unpack_file = urldata.parm.get('extract')
+                    cmd = 'rpm2cpio.sh %s | cpio -id %s' % (file, unpack_file)
+                    iterate = True
+                    iterate_file = unpack_file
+                else:
+                    cmd = 'rpm2cpio.sh %s | cpio -id' % (file)
+            elif file.endswith('.deb') or file.endswith('.ipk'):
+                cmd = 'ar -p %s data.tar.gz | zcat | tar --no-same-owner -xpf -' % file
+            elif file.endswith('.tar.7z'):
+                cmd = '7z x -so %s | tar xf - ' % file
+            elif file.endswith('.7z'):
+                cmd = '7za x -y %s 1>/dev/null' % file
+
+        # If 'subdir' param exists, create a dir and use it as destination for unpack cmd
+        if 'subdir' in urldata.parm:
+            unpackdir = '%s/%s' % (rootdir, urldata.parm.get('subdir'))
+            bb.utils.mkdirhier(unpackdir)
+        else:
+            unpackdir = rootdir
+
+        if not unpack or not cmd:
+            # If file == dest, then avoid any copies, as we already put the file into dest!
+            dest = os.path.join(unpackdir, os.path.basename(file))
+            if file != dest and not (os.path.exists(dest) and os.path.samefile(file, dest)):
+                destdir = '.'
+                # For file:// entries all intermediate dirs in path must be created at destination
+                if urldata.type == "file":
+                    # A trailing '/' would copy to the wrong place
+                    urlpath = urldata.path.rstrip('/')
+                    # We want files placed relative to cwd, so strip any leading '/'
+                    urlpath = urlpath.lstrip('/')
+                    if urlpath.find("/") != -1:
+                        destdir = urlpath.rsplit("/", 1)[0] + '/'
+                        bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir))
+                cmd = 'cp -fpPR %s %s' % (file, destdir)
+
+        if not cmd:
+            return
+
+        # Change to unpackdir before executing command
+        save_cwd = os.getcwd()
+        os.chdir(unpackdir)
+
+        path = data.getVar('PATH', True)
+        if path:
+            cmd = "PATH=\"%s\" %s" % (path, cmd)
+        bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
+        ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
+
+        os.chdir(save_cwd)
+
+        if ret != 0:
+            raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url)
+
+        if iterate is True:
+            iterate_urldata = urldata
+            iterate_urldata.localpath = "%s/%s" % (rootdir, iterate_file)
+            self.unpack(urldata, rootdir, data)
+
+        return
+
+    def clean(self, urldata, d):
+        """
+        Clean any existing full or partial download
+        """
+        bb.utils.remove(urldata.localpath)
+
+    def try_premirror(self, urldata, d):
+        """
+        Should premirrors be used?
+        """
+        return True
+
+    def checkstatus(self, fetch, urldata, d):
+        """
+        Check the status of a URL
+        Assumes localpath was called first
+        """
+        logger.info("URL %s could not be checked for status since no method exists.", url)
+        return True
+
+    def latest_revision(self, ud, d, name):
+        """
+        Look in the cache for the latest revision, if not present ask the SCM.
+        """
+        if not hasattr(self, "_latest_revision"):
+            raise ParameterError("The fetcher for this URL does not support _latest_revision", url)
+
+        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
+        key = self.generate_revision_key(ud, d, name)
+        try:
+            return revs[key]
+        except KeyError:
+            revs[key] = rev = self._latest_revision(ud, d, name)
+            return rev
+
+    def sortable_revision(self, ud, d, name):
+        latest_rev = self._build_revision(ud, d, name)
+        return True, str(latest_rev)
+
+    def generate_revision_key(self, ud, d, name):
+        key = self._revision_key(ud, d, name)
+        return "%s-%s" % (key, d.getVar("PN", True) or "")
+
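FetchMethod above is the plugin interface that every fetcher backend implements; concrete backends override supports(), urldata_init() and download(), and are registered via methods.append() at the end of this module. A minimal sketch of a hypothetical plugin, assuming the bb.fetch2 names shown in this patch (the examplefile:// scheme and the cp command are invented for illustration):

    import os
    from bb.fetch2 import FetchMethod, runfetchcmd

    class ExampleFile(FetchMethod):
        def supports(self, ud, d):
            # Claim URLs of the (hypothetical) examplefile:// scheme
            return ud.type in ['examplefile']

        def urldata_init(self, ud, d):
            # localfile drives the default localpath() (DL_DIR/<localfile>)
            ud.localfile = os.path.basename(ud.path)

        def download(self, ud, d):
            # Copy the source into DL_DIR; runfetchcmd raises on failure
            runfetchcmd("cp -f %s %s" % (ud.path, ud.localpath), d)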
+class Fetch(object):
+    def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None):
+        if localonly and cache:
+            raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")
+
+        if len(urls) == 0:
+            urls = d.getVar("SRC_URI", True).split()
+        self.urls = urls
+        self.d = d
+        self.ud = {}
+        self.connection_cache = connection_cache
+
+        fn = d.getVar('FILE', True)
+        if cache and fn and fn in urldata_cache:
+            self.ud = urldata_cache[fn]
+
+        for url in urls:
+            if url not in self.ud:
+                try:
+                    self.ud[url] = FetchData(url, d, localonly)
+                except NonLocalMethod:
+                    if localonly:
+                        self.ud[url] = None
+                        pass
+
+        if fn and cache:
+            urldata_cache[fn] = self.ud
+
+    def localpath(self, url):
+        if url not in self.urls:
+            self.ud[url] = FetchData(url, self.d)
+
+        self.ud[url].setup_localpath(self.d)
+        return self.d.expand(self.ud[url].localpath)
+
+    def localpaths(self):
+        """
+        Return a list of the local filenames, assuming successful fetch
+        """
+        local = []
+
+        for u in self.urls:
+            ud = self.ud[u]
+            ud.setup_localpath(self.d)
+            local.append(ud.localpath)
+
+        return local
+
+    def download(self, urls=None):
+        """
+        Fetch all urls
+        """
+        if not urls:
+            urls = self.urls
+
+        network = self.d.getVar("BB_NO_NETWORK", True)
+        premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY", True) == "1")
+
+        for u in urls:
+            ud = self.ud[u]
+            ud.setup_localpath(self.d)
+            m = ud.method
+            localpath = ""
+
+            if ud.lockfile:
+                lf = bb.utils.lockfile(ud.lockfile)
+
+            try:
+                self.d.setVar("BB_NO_NETWORK", network)
+ 
+                if verify_donestamp(ud, self.d) and not m.need_update(ud, self.d):
+                    localpath = ud.localpath
+                elif m.try_premirror(ud, self.d):
+                    logger.debug(1, "Trying PREMIRRORS")
+                    mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
+                    localpath = try_mirrors(self, self.d, ud, mirrors, False)
+
+                if premirroronly:
+                    self.d.setVar("BB_NO_NETWORK", "1")
+
+                os.chdir(self.d.getVar("DL_DIR", True))
+
+                firsterr = None
+                verified_stamp = verify_donestamp(ud, self.d)
+                if not localpath and (not verified_stamp or m.need_update(ud, self.d)):
+                    try:
+                        if not trusted_network(self.d, ud.url):
+                            raise UntrustedUrl(ud.url)
+                        logger.debug(1, "Trying Upstream")
+                        m.download(ud, self.d)
+                        if hasattr(m, "build_mirror_data"):
+                            m.build_mirror_data(ud, self.d)
+                        localpath = ud.localpath
+                        # Verify the checksum early so that, if it mismatches, the
+                        # fetcher still has a chance to fetch from a mirror
+                        update_stamp(ud, self.d)
+
+                    except bb.fetch2.NetworkAccess:
+                        raise
+
+                    except BBFetchException as e:
+                        if isinstance(e, ChecksumError):
+                            logger.warn("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
+                            logger.debug(1, str(e))
+                            rename_bad_checksum(ud, e.checksum)
+                        elif isinstance(e, NoChecksumError):
+                            raise
+                        else:
+                            logger.warn('Failed to fetch URL %s, attempting MIRRORS if available' % u)
+                            logger.debug(1, str(e))
+                        firsterr = e
+                        # Remove any incomplete fetch
+                        if not verified_stamp:
+                            m.clean(ud, self.d)
+                        logger.debug(1, "Trying MIRRORS")
+                        mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
+                        localpath = try_mirrors(self, self.d, ud, mirrors)
+
+                if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1):
+                    if firsterr:
+                        logger.error(str(firsterr))
+                    raise FetchError("Unable to fetch URL from any source.", u)
+
+                update_stamp(ud, self.d)
+
+            except BBFetchException as e:
+                if isinstance(e, ChecksumError):
+                    logger.error("Checksum failure fetching %s" % u)
+                raise
+
+            finally:
+                if ud.lockfile:
+                    bb.utils.unlockfile(lf)
+
+    def checkstatus(self, urls=None):
+        """
+        Check all urls exist upstream
+        """
+
+        if not urls:
+            urls = self.urls
+
+        for u in urls:
+            ud = self.ud[u]
+            ud.setup_localpath(self.d)
+            m = ud.method
+            logger.debug(1, "Testing URL %s", u)
+            # First try checking uri, u, from PREMIRRORS
+            mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
+            ret = try_mirrors(self, self.d, ud, mirrors, True)
+            if not ret:
+                # Next try checking from the original uri, u
+                try:
+                    ret = m.checkstatus(self, ud, self.d)
+                except:
+                    # Finally, try checking uri, u, from MIRRORS
+                    mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
+                    ret = try_mirrors(self, self.d, ud, mirrors, True)
+
+            if not ret:
+                raise FetchError("URL %s doesn't work" % u, u)
+
+    def unpack(self, root, urls=None):
+        """
+        Unpack all urls into the given root directory
+        """
+
+        if not urls:
+            urls = self.urls
+
+        for u in urls:
+            ud = self.ud[u]
+            ud.setup_localpath(self.d)
+
+            if ud.lockfile:
+                lf = bb.utils.lockfile(ud.lockfile)
+
+            ud.method.unpack(ud, root, self.d)
+
+            if ud.lockfile:
+                bb.utils.unlockfile(lf)
+
+    def clean(self, urls=None):
+        """
+        Clean files that the fetcher gets or places
+        """
+
+        if not urls:
+            urls = self.urls
+
+        for url in urls:
+            if url not in self.ud:
+                self.ud[url] = FetchData(url, self.d)
+            ud = self.ud[url]
+            ud.setup_localpath(self.d)
+
+            if not ud.localfile and ud.localpath is None:
+                continue
+
+            if ud.lockfile:
+                lf = bb.utils.lockfile(ud.lockfile)
+
+            ud.method.clean(ud, self.d)
+            if ud.donestamp:
+                bb.utils.remove(ud.donestamp)
+
+            if ud.lockfile:
+                bb.utils.unlockfile(lf)
+
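For reference, this is roughly how callers drive the Fetch front end defined above. It is a usage sketch rather than standalone runnable code: "d" is assumed to be a populated datastore (SRC_URI, DL_DIR, etc.) and "workdir" a writable directory.

    import bb.fetch2

    fetcher = bb.fetch2.Fetch((d.getVar("SRC_URI", True) or "").split(), d)
    fetcher.download()            # PREMIRRORS -> upstream -> MIRRORS, as above
    print(fetcher.localpaths())   # resolved download locations (usually under DL_DIR)
    fetcher.unpack(workdir)       # extract/copy each entry into workdir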
+class FetchConnectionCache(object):
+    """
+        A class which represents a container for socket connections.
+    """
+    def __init__(self):
+        self.cache = {}
+
+    def get_connection_name(self, host, port):
+        return host + ':' + str(port)
+
+    def add_connection(self, host, port, connection):
+        cn = self.get_connection_name(host, port)
+
+        if cn not in self.cache:
+            self.cache[cn] = connection
+
+    def get_connection(self, host, port):
+        connection = None
+
+        cn = self.get_connection_name(host, port)
+        if cn in self.cache:
+            connection = self.cache[cn]
+
+        return connection
+
+    def remove_connection(self, host, port):
+        cn = self.get_connection_name(host, port)
+        if cn in self.cache:
+            self.cache[cn].close()
+            del self.cache[cn]
+
+    def close_connections(self):
+        for cn in list(self.cache.keys()):
+            self.cache[cn].close()
+            del self.cache[cn]
+
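A small usage sketch of FetchConnectionCache, assuming the class above is in scope: connections are keyed by "host:port" and closed when removed. DummyConnection is a made-up stand-in for whatever protocol handle a fetcher caches.

    class DummyConnection:
        def close(self):
            print("connection closed")

    cache = FetchConnectionCache()
    cache.add_connection("ftp.example.org", 21, DummyConnection())
    assert cache.get_connection("ftp.example.org", 21) is not None
    cache.remove_connection("ftp.example.org", 21)   # prints "connection closed"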
+from . import cvs
+from . import git
+from . import gitsm
+from . import gitannex
+from . import local
+from . import svn
+from . import wget
+from . import ssh
+from . import sftp
+from . import perforce
+from . import bzr
+from . import hg
+from . import osc
+from . import repo
+from . import clearcase
+from . import npm
+
+methods.append(local.Local())
+methods.append(wget.Wget())
+methods.append(svn.Svn())
+methods.append(git.Git())
+methods.append(gitsm.GitSM())
+methods.append(gitannex.GitANNEX())
+methods.append(cvs.Cvs())
+methods.append(ssh.SSH())
+methods.append(sftp.SFTP())
+methods.append(perforce.Perforce())
+methods.append(bzr.Bzr())
+methods.append(hg.Hg())
+methods.append(osc.Osc())
+methods.append(repo.Repo())
+methods.append(clearcase.ClearCase())
+methods.append(npm.Npm())

+ 143 - 0
bitbake/lib/bb/fetch2/bzr.py

@@ -0,0 +1,143 @@
+"""
+BitBake 'Fetch' implementation for bzr.
+
+"""
+
+# Copyright (C) 2007 Ross Burton
+# Copyright (C) 2007 Richard Purdie
+#
+#   Classes for obtaining upstream sources for the
+#   BitBake build tools.
+#   Copyright (C) 2003, 2004  Chris Larson
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import sys
+import logging
+import bb
+from bb import data
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import FetchError
+from bb.fetch2 import runfetchcmd
+from bb.fetch2 import logger
+
+class Bzr(FetchMethod):
+    def supports(self, ud, d):
+        return ud.type in ['bzr']
+
+    def urldata_init(self, ud, d):
+        """
+        Init bzr-specific variables within url data
+        """
+        # Create paths to bzr checkouts
+        relpath = self._strip_leading_slashes(ud.path)
+        ud.pkgdir = os.path.join(data.expand('${BZRDIR}', d), ud.host, relpath)
+
+        ud.setup_revisons(d)
+
+        if not ud.revision:
+            ud.revision = self.latest_revision(ud, d)
+
+        ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision), d)
+
+    def _buildbzrcommand(self, ud, d, command):
+        """
+        Build up a bzr commandline based on ud
+        command is "fetch", "update", "revno"
+        """
+
+        basecmd = data.expand('${FETCHCMD_bzr}', d)
+
+        proto =  ud.parm.get('protocol', 'http')
+
+        bzrroot = ud.host + ud.path
+
+        options = []
+
+        if command == "revno":
+            bzrcmd = "%s revno %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
+        else:
+            if ud.revision:
+                options.append("-r %s" % ud.revision)
+
+            if command == "fetch":
+                bzrcmd = "%s branch %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
+            elif command == "update":
+                bzrcmd = "%s pull %s --overwrite" % (basecmd, " ".join(options))
+            else:
+                raise FetchError("Invalid bzr command %s" % command, ud.url)
+
+        return bzrcmd
+
+    def download(self, ud, d):
+        """Fetch url"""
+
+        if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
+            bzrcmd = self._buildbzrcommand(ud, d, "update")
+            logger.debug(1, "BZR Update %s", ud.url)
+            bb.fetch2.check_network_access(d, bzrcmd, ud.url)
+            os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path)))
+            runfetchcmd(bzrcmd, d)
+        else:
+            bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True)
+            bzrcmd = self._buildbzrcommand(ud, d, "fetch")
+            bb.fetch2.check_network_access(d, bzrcmd, ud.url)
+            logger.debug(1, "BZR Checkout %s", ud.url)
+            bb.utils.mkdirhier(ud.pkgdir)
+            os.chdir(ud.pkgdir)
+            logger.debug(1, "Running %s", bzrcmd)
+            runfetchcmd(bzrcmd, d)
+
+        os.chdir(ud.pkgdir)
+
+        scmdata = ud.parm.get("scmdata", "")
+        if scmdata == "keep":
+            tar_flags = ""
+        else:
+            tar_flags = "--exclude '.bzr' --exclude '.bzrtags'"
+
+        # tar them up to a defined filename
+        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d, cleanup = [ud.localpath])
+
+    def supports_srcrev(self):
+        return True
+
+    def _revision_key(self, ud, d, name):
+        """
+        Return a unique key for the url
+        """
+        return "bzr:" + ud.pkgdir
+
+    def _latest_revision(self, ud, d, name):
+        """
+        Return the latest upstream revision number
+        """
+        logger.debug(2, "BZR fetcher hitting network for %s", ud.url)
+
+        bb.fetch2.check_network_access(d, self._buildbzrcommand(ud, d, "revno"), ud.url)
+
+        output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True)
+
+        return output.strip()
+
+    def sortable_revision(self, ud, d, name):
+        """
+        Return a sortable revision number which in our case is the revision number
+        """
+
+        return False, self._build_revision(ud, d)
+
+    def _build_revision(self, ud, d):
+        return ud.revision

+ 263 - 0
bitbake/lib/bb/fetch2/clearcase.py

@@ -0,0 +1,263 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' clearcase implementation
+
+The clearcase fetcher is used to retrieve files from a ClearCase repository.
+
+Usage in the recipe:
+
+    SRC_URI = "ccrc://cc.example.org/ccrc;vob=/example_vob;module=/example_module"
+    SRCREV = "EXAMPLE_CLEARCASE_TAG"
+    PV = "${@d.getVar("SRCREV", False).replace("/", "+")}"
+
+The fetcher uses the rcleartool or cleartool remote client, depending on which one is available.
+
+Supported SRC_URI options are:
+
+- vob
+    (required) The name of the clearcase VOB (with a leading "/")
+
+- module
+    The module in the selected VOB (with a leading "/")
+
+    The module and vob parameters are combined to create
+    the following load rule in the view config spec:
+                load <vob><module>
+
+- proto
+    http or https
+
+Related variables:
+
+    CCASE_CUSTOM_CONFIG_SPEC
+            Write a config spec to this variable in your recipe to use it instead
+            of the default config spec generated by this fetcher.
+            Please note that the SRCREV loses its functionality if you specify
+            this variable. SRCREV is still used to label the archive after a fetch,
+            but it doesn't define what's fetched.
+
+User credentials:
+    cleartool:
+            The login of cleartool is handled by the system. No special steps needed.
+
+    rcleartool:
+            In order to use rcleartool with authenticated users an `rcleartool login` is
+            necessary before using the fetcher.
+"""
+# Copyright (C) 2014 Siemens AG
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+
+import os
+import sys
+import shutil
+import bb
+from   bb import data
+from   bb.fetch2 import FetchMethod
+from   bb.fetch2 import FetchError
+from   bb.fetch2 import runfetchcmd
+from   bb.fetch2 import logger
+from   distutils import spawn
+
+class ClearCase(FetchMethod):
+    """Class to fetch urls via 'clearcase'"""
+    def init(self, d):
+        pass
+
+    def supports(self, ud, d):
+        """
+        Check to see if a given url can be fetched with Clearcase.
+        """
+        return ud.type in ['ccrc']
+
+    def debug(self, msg):
+        logger.debug(1, "ClearCase: %s", msg)
+
+    def urldata_init(self, ud, d):
+        """
+        Init ClearCase-specific variables within url data
+        """
+        ud.proto = "https"
+        if 'protocol' in ud.parm:
+            ud.proto = ud.parm['protocol']
+        if ud.proto not in ('http', 'https'):
+            raise bb.fetch2.ParameterError("Invalid protocol type", ud.url)
+
+        ud.vob = ''
+        if 'vob' in ud.parm:
+            ud.vob = ud.parm['vob']
+        else:
+            msg = ud.url+": vob must be defined so the fetcher knows what to get."
+            raise bb.fetch2.MissingParameterError('vob', msg)
+
+        if 'module' in ud.parm:
+            ud.module = ud.parm['module']
+        else:
+            ud.module = ""
+
+        ud.basecmd = d.getVar("FETCHCMD_ccrc", True) or spawn.find_executable("cleartool") or spawn.find_executable("rcleartool")
+
+        if data.getVar("SRCREV", d, True) == "INVALID":
+          raise FetchError("Set a valid SRCREV for the clearcase fetcher in your recipe, e.g. SRCREV = \"/main/LATEST\" or any other label of your choice.")
+
+        ud.label = d.getVar("SRCREV", False)
+        ud.customspec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC", True)
+
+        ud.server     = "%s://%s%s" % (ud.proto, ud.host, ud.path)
+
+        ud.identifier = "clearcase-%s%s-%s" % ( ud.vob.replace("/", ""),
+                                                ud.module.replace("/", "."),
+                                                ud.label.replace("/", "."))
+
+        ud.viewname         = "%s-view%s" % (ud.identifier, d.getVar("DATETIME", d, True))
+        ud.csname           = "%s-config-spec" % (ud.identifier)
+        ud.ccasedir         = os.path.join(data.getVar("DL_DIR", d, True), ud.type)
+        ud.viewdir          = os.path.join(ud.ccasedir, ud.viewname)
+        ud.configspecfile   = os.path.join(ud.ccasedir, ud.csname)
+        ud.localfile        = "%s.tar.gz" % (ud.identifier)
+
+        self.debug("host            = %s" % ud.host)
+        self.debug("path            = %s" % ud.path)
+        self.debug("server          = %s" % ud.server)
+        self.debug("proto           = %s" % ud.proto)
+        self.debug("type            = %s" % ud.type)
+        self.debug("vob             = %s" % ud.vob)
+        self.debug("module          = %s" % ud.module)
+        self.debug("basecmd         = %s" % ud.basecmd)
+        self.debug("label           = %s" % ud.label)
+        self.debug("ccasedir        = %s" % ud.ccasedir)
+        self.debug("viewdir         = %s" % ud.viewdir)
+        self.debug("viewname        = %s" % ud.viewname)
+        self.debug("configspecfile  = %s" % ud.configspecfile)
+        self.debug("localfile       = %s" % ud.localfile)
+
+        ud.localfile = os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
+
+    def _build_ccase_command(self, ud, command):
+        """
+        Build up a commandline based on ud
+        command is: mkview, setcs, rmview
+        """
+        options = []
+
+        if "rcleartool" in ud.basecmd:
+            options.append("-server %s" % ud.server)
+
+        basecmd = "%s %s" % (ud.basecmd, command)
+
+        if command == 'mkview':
+            if not "rcleartool" in ud.basecmd:
+                # Cleartool needs a -snapshot view
+                options.append("-snapshot")
+            options.append("-tag %s" % ud.viewname)
+            options.append(ud.viewdir)
+
+        elif command == 'rmview':
+            options.append("-force")
+            options.append("%s" % ud.viewdir)
+
+        elif command == 'setcs':
+            options.append("-overwrite")
+            options.append(ud.configspecfile)
+
+        else:
+            raise FetchError("Invalid ccase command %s" % command)
+
+        ccasecmd = "%s %s" % (basecmd, " ".join(options))
+        self.debug("ccasecmd = %s" % ccasecmd)
+        return ccasecmd
+
+    def _write_configspec(self, ud, d):
+        """
+        Create config spec file (ud.configspecfile) for ccase view
+        """
+        config_spec = ""
+        custom_config_spec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC", d)
+        if custom_config_spec is not None:
+            for line in custom_config_spec.split("\\n"):
+                config_spec += line+"\n"
+            bb.warn("A custom config spec has been set, SRCREV is only relevant for the tarball name.")
+        else:
+            config_spec += "element * CHECKEDOUT\n"
+            config_spec += "element * %s\n" % ud.label
+            config_spec += "load %s%s\n" % (ud.vob, ud.module)
+
+        logger.info("Using config spec: \n%s" % config_spec)
+
+        with open(ud.configspecfile, 'w') as f:
+            f.write(config_spec)
+
+    def _remove_view(self, ud, d):
+        if os.path.exists(ud.viewdir):
+            os.chdir(ud.ccasedir)
+            cmd = self._build_ccase_command(ud, 'rmview')
+            logger.info("cleaning up [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
+            bb.fetch2.check_network_access(d, cmd, ud.url)
+            output = runfetchcmd(cmd, d)
+            logger.info("rmview output: %s", output)
+
+    def need_update(self, ud, d):
+        if ("LATEST" in ud.label) or (ud.customspec and "LATEST" in ud.customspec):
+            ud.identifier += "-%s" % d.getVar("DATETIME",d, True)
+            return True
+        if os.path.exists(ud.localpath):
+            return False
+        return True
+
+    def supports_srcrev(self):
+        return True
+
+    def sortable_revision(self, ud, d, name):
+        return False, ud.identifier
+
+    def download(self, ud, d):
+        """Fetch url"""
+
+        # Make a fresh view
+        bb.utils.mkdirhier(ud.ccasedir)
+        self._write_configspec(ud, d)
+        cmd = self._build_ccase_command(ud, 'mkview')
+        logger.info("creating view [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
+        bb.fetch2.check_network_access(d, cmd, ud.url)
+        try:
+            runfetchcmd(cmd, d)
+        except FetchError as e:
+            if "CRCLI2008E" in e.msg:
+                raise FetchError("%s\n%s\n" % (e.msg, "Call `rcleartool login` in your console to authenticate to the clearcase server before running bitbake."))
+            else:
+                raise e
+
+        # Set configspec: Setting the configspec effectively fetches the files as defined in the configspec
+        os.chdir(ud.viewdir)
+        cmd = self._build_ccase_command(ud, 'setcs')
+        logger.info("fetching data [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
+        bb.fetch2.check_network_access(d, cmd, ud.url)
+        output = runfetchcmd(cmd, d)
+        logger.info("%s", output)
+
+        # Copy the configspec to the viewdir so we have it in our source tarball later
+        shutil.copyfile(ud.configspecfile, os.path.join(ud.viewdir, ud.csname))
+
+        # Clean clearcase meta-data before tar
+
+        runfetchcmd('tar -czf "%s" .' % (ud.localpath), d, cleanup = [ud.localpath])
+
+        # Clean up so we can create a new view next time
+        self.clean(ud, d)
+
+    def clean(self, ud, d):
+        self._remove_view(ud, d)
+        bb.utils.remove(ud.configspecfile)

+ 171 - 0
bitbake/lib/bb/fetch2/cvs.py

@@ -0,0 +1,171 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementations
+
+Classes for obtaining upstream sources for the
+BitBake build tools.
+
+"""
+
+# Copyright (C) 2003, 2004  Chris Larson
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+#Based on functions from the base bb module, Copyright 2003 Holger Schurig
+#
+
+import os
+import logging
+import bb
+from bb.fetch2 import FetchMethod, FetchError, MissingParameterError, logger
+from bb.fetch2 import runfetchcmd
+
+class Cvs(FetchMethod):
+    """
+    Class to fetch a module or modules from cvs repositories
+    """
+    def supports(self, ud, d):
+        """
+        Check to see if a given url can be fetched with cvs.
+        """
+        return ud.type in ['cvs']
+
+    def urldata_init(self, ud, d):
+        if not "module" in ud.parm:
+            raise MissingParameterError("module", ud.url)
+        ud.module = ud.parm["module"]
+
+        ud.tag = ud.parm.get('tag', "")
+
+        # Override the default date in certain cases
+        if 'date' in ud.parm:
+            ud.date = ud.parm['date']
+        elif ud.tag:
+            ud.date = ""
+
+        norecurse = ''
+        if 'norecurse' in ud.parm:
+            norecurse = '_norecurse'
+
+        fullpath = ''
+        if 'fullpath' in ud.parm:
+            fullpath = '_fullpath'
+
+        ud.localfile = bb.data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d)
+
+    def need_update(self, ud, d):
+        if (ud.date == "now"):
+            return True
+        if not os.path.exists(ud.localpath):
+            return True
+        return False
+
+    def download(self, ud, d):
+
+        method = ud.parm.get('method', 'pserver')
+        localdir = ud.parm.get('localdir', ud.module)
+        cvs_port = ud.parm.get('port', '')
+
+        cvs_rsh = None
+        if method == "ext":
+            if "rsh" in ud.parm:
+                cvs_rsh = ud.parm["rsh"]
+
+        if method == "dir":
+            cvsroot = ud.path
+        else:
+            cvsroot = ":" + method
+            cvsproxyhost = d.getVar('CVS_PROXY_HOST', True)
+            if cvsproxyhost:
+                cvsroot += ";proxy=" + cvsproxyhost
+            cvsproxyport = d.getVar('CVS_PROXY_PORT', True)
+            if cvsproxyport:
+                cvsroot += ";proxyport=" + cvsproxyport
+            cvsroot += ":" + ud.user
+            if ud.pswd:
+                cvsroot += ":" + ud.pswd
+            cvsroot += "@" + ud.host + ":" + cvs_port + ud.path
+
+        options = []
+        if 'norecurse' in ud.parm:
+            options.append("-l")
+        if ud.date:
+            # treat YYYYMMDDHHMM specially for CVS
+            if len(ud.date) == 12:
+                options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12]))
+            else:
+                options.append("-D \"%s UTC\"" % ud.date)
+        if ud.tag:
+            options.append("-r %s" % ud.tag)
+
+        cvsbasecmd = d.getVar("FETCHCMD_cvs", True)
+        cvscmd = cvsbasecmd + " '-d" + cvsroot + "' co " + " ".join(options) + " " + ud.module
+        cvsupdatecmd = cvsbasecmd + " '-d" + cvsroot + "' update -d -P " + " ".join(options)
+
+        if cvs_rsh:
+            cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
+            cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)
+
+        # create module directory
+        logger.debug(2, "Fetch: checking for module directory")
+        pkg = d.getVar('PN', True)
+        pkgdir = os.path.join(d.getVar('CVSDIR', True), pkg)
+        moddir = os.path.join(pkgdir, localdir)
+        if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
+            logger.info("Update " + ud.url)
+            bb.fetch2.check_network_access(d, cvsupdatecmd, ud.url)
+            # update sources there
+            os.chdir(moddir)
+            cmd = cvsupdatecmd
+        else:
+            logger.info("Fetch " + ud.url)
+            # check out sources there
+            bb.utils.mkdirhier(pkgdir)
+            os.chdir(pkgdir)
+            logger.debug(1, "Running %s", cvscmd)
+            bb.fetch2.check_network_access(d, cvscmd, ud.url)
+            cmd = cvscmd
+
+        runfetchcmd(cmd, d, cleanup = [moddir])
+
+        if not os.access(moddir, os.R_OK):
+            raise FetchError("Directory %s was not readable despite sucessful fetch?!" % moddir, ud.url)
+
+        scmdata = ud.parm.get("scmdata", "")
+        if scmdata == "keep":
+            tar_flags = ""
+        else:
+            tar_flags = "--exclude 'CVS'"
+
+        # tar them up to a defined filename
+        if 'fullpath' in ud.parm:
+            os.chdir(pkgdir)
+            cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir)
+        else:
+            os.chdir(moddir)
+            os.chdir('..')
+            cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir))
+
+        runfetchcmd(cmd, d, cleanup = [ud.localpath])
+
+    def clean(self, ud, d):
+        """ Clean CVS Files and tarballs """
+
+        pkg = d.getVar('PN', True)
+        pkgdir = os.path.join(d.getVar("CVSDIR", True), pkg)
+
+        bb.utils.remove(pkgdir, True)
+        bb.utils.remove(ud.localpath)
+
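The CVSROOT string assembled in download() above is the trickiest part of this fetcher. The following standalone sketch restates the same assembly logic for the non-"dir" methods (illustrative only; all values are made up):

    def build_cvsroot(method, user, pswd, host, port, path, proxyhost=None, proxyport=None):
        # Mirrors the cvsroot construction in Cvs.download()
        root = ":" + method
        if proxyhost:
            root += ";proxy=" + proxyhost
        if proxyport:
            root += ";proxyport=" + proxyport
        root += ":" + user
        if pswd:
            root += ":" + pswd
        return root + "@" + host + ":" + port + path

    print(build_cvsroot("pserver", "anonymous", "", "cvs.example.org", "", "/cvsroot/project"))
    # -> :pserver:anonymous@cvs.example.org:/cvsroot/project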

+ 451 - 0
bitbake/lib/bb/fetch2/git.py

@@ -0,0 +1,451 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' git implementation
+
+git fetcher support the SRC_URI with format of:
+SRC_URI = "git://some.host/somepath;OptionA=xxx;OptionB=xxx;..."
+
+Supported SRC_URI options are:
+
+- branch
+   The git branch to retrieve from. The default is "master"
+
+   This option also supports fetching multiple branches, separated by
+   commas.  In the multiple-branch case, the name option must list the
+   same number of names as branches; each name is then used to specify
+   the SRCREV for its branch
+   e.g:
+   SRC_URI="git://some.host/somepath;branch=branchX,branchY;name=nameX,nameY"
+   SRCREV_nameX = "xxxxxxxxxxxxxxxxxxxx"
+   SRCREV_nameY = "YYYYYYYYYYYYYYYYYYYY"
+
+- tag
+    The git tag to retrieve. The default is "master"
+
+- protocol
+   The method to use to access the repository. Common options are "git",
+   "http", "https", "file", "ssh" and "rsync". The default is "git".
+
+- rebaseable
+   rebaseable indicates that the upstream git repo may rebase in the future
+   and that the current revision may disappear from the upstream repo. This
+   option reminds the fetcher to preserve the local cache carefully for
+   future use.
+   The default value is "0"; set rebaseable=1 for a rebaseable git repo.
+
+- nocheckout
+   Don't check out source code when unpacking. Set this option for recipes
+   that have their own routine to check out code.
+   The default is "0"; set nocheckout=1 if needed.
+
+- bareclone
+   Create a bare clone of the source code and don't check out the source code
+   when unpacking. Set this option for recipes that have their own routine to
+   check out code and their own tracking-branch requirements.
+   The default is "0"; set bareclone=1 if needed.
+
+- nobranch
+   Don't validate that the SHA is present on the branch. Set this option for
+   recipes referring to a commit that is reachable from a tag rather than a
+   branch.
+   The default is "0"; set nobranch=1 if needed.
+
+"""
+
+#Copyright (C) 2005 Richard Purdie
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import errno
+import os
+import re
+import bb
+import errno
+from   bb    import data
+from   bb.fetch2 import FetchMethod
+from   bb.fetch2 import runfetchcmd
+from   bb.fetch2 import logger
+
+class Git(FetchMethod):
+    """Class to fetch a module or modules from git repositories"""
+    def init(self, d):
+        pass
+
+    def supports(self, ud, d):
+        """
+        Check to see if a given url can be fetched with git.
+        """
+        return ud.type in ['git']
+
+    def supports_checksum(self, urldata):
+        return False
+
+    def urldata_init(self, ud, d):
+        """
+        Init git-specific variables within url data
+        so that git methods like latest_revision() can work
+        """
+        if 'protocol' in ud.parm:
+            ud.proto = ud.parm['protocol']
+        elif not ud.host:
+            ud.proto = 'file'
+        else:
+            ud.proto = "git"
+
+        if not ud.proto in ('git', 'file', 'ssh', 'http', 'https', 'rsync'):
+            raise bb.fetch2.ParameterError("Invalid protocol type", ud.url)
+
+        ud.nocheckout = ud.parm.get("nocheckout","0") == "1"
+
+        ud.rebaseable = ud.parm.get("rebaseable","0") == "1"
+
+        ud.nobranch = ud.parm.get("nobranch","0") == "1"
+
+        # bareclone implies nocheckout
+        ud.bareclone = ud.parm.get("bareclone","0") == "1"
+        if ud.bareclone:
+            ud.nocheckout = 1
+  
+        ud.unresolvedrev = {}
+        branches = ud.parm.get("branch", "master").split(',')
+        if len(branches) != len(ud.names):
+            raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url)
+        ud.branches = {}
+        for name in ud.names:
+            branch = branches[ud.names.index(name)]
+            ud.branches[name] = branch
+            ud.unresolvedrev[name] = branch
+
+        ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git -c core.fsyncobjectfiles=0"
+
+        ud.write_tarballs = ((data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) or "0") != "0") or ud.rebaseable
+
+        ud.setup_revisons(d)
+
+        for name in ud.names:
+            # Ensure anything that doesn't look like a SHA-1 checksum/revision is translated into one
+            if not ud.revisions[name] or len(ud.revisions[name]) != 40  or (False in [c in "abcdef0123456789" for c in ud.revisions[name]]):
+                if ud.revisions[name]:
+                    ud.unresolvedrev[name] = ud.revisions[name]
+                ud.revisions[name] = self.latest_revision(ud, d, name)
+
+        gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.'))
+        if gitsrcname.startswith('.'):
+            gitsrcname = gitsrcname[1:]
+
+        # For a rebaseable git repo it is necessary to keep a mirror tarball
+        # per revision, so that even if the revision disappears from the
+        # upstream repo in the future, the mirror remains intact and still
+        # contains the revision
+        if ud.rebaseable:
+            for name in ud.names:
+                gitsrcname = gitsrcname + '_' + ud.revisions[name]
+        ud.mirrortarball = 'git2_%s.tar.gz' % (gitsrcname)
+        ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)
+        gitdir = d.getVar("GITDIR", True) or (d.getVar("DL_DIR", True) + "/git2/")
+        ud.clonedir = os.path.join(gitdir, gitsrcname)
+
+        ud.localfile = ud.clonedir
+
+    def localpath(self, ud, d):
+        return ud.clonedir
+
+    def need_update(self, ud, d):
+        if not os.path.exists(ud.clonedir):
+            return True
+        os.chdir(ud.clonedir)
+        for name in ud.names:
+            if not self._contains_ref(ud, d, name):
+                return True
+        if ud.write_tarballs and not os.path.exists(ud.fullmirror):
+            return True
+        return False
+
+    def try_premirror(self, ud, d):
+        # If we don't do this, updating an existing checkout with only premirrors
+        # is not possible
+        if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None:
+            return True
+        if os.path.exists(ud.clonedir):
+            return False
+        return True
+
+    def download(self, ud, d):
+        """Fetch url"""
+
+        # If the checkout doesn't exist and the mirror tarball does, extract it
+        if not os.path.exists(ud.clonedir) and os.path.exists(ud.fullmirror):
+            bb.utils.mkdirhier(ud.clonedir)
+            os.chdir(ud.clonedir)
+            runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)
+
+        repourl = self._get_repo_url(ud)
+
+        # If the repo still doesn't exist, fallback to cloning it
+        if not os.path.exists(ud.clonedir):
+            # We do this since git will use a "-l" option automatically for local urls where possible
+            if repourl.startswith("file://"):
+                repourl = repourl[7:]
+            clone_cmd = "%s clone --bare --mirror %s %s" % (ud.basecmd, repourl, ud.clonedir)
+            if ud.proto.lower() != 'file':
+                bb.fetch2.check_network_access(d, clone_cmd)
+            runfetchcmd(clone_cmd, d)
+
+        os.chdir(ud.clonedir)
+        # Update the checkout if needed
+        needupdate = False
+        for name in ud.names:
+            if not self._contains_ref(ud, d, name):
+                needupdate = True
+        if needupdate:
+            try: 
+                runfetchcmd("%s remote rm origin" % ud.basecmd, d) 
+            except bb.fetch2.FetchError:
+                logger.debug(1, "No Origin")
+
+            runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, repourl), d)
+            fetch_cmd = "%s fetch -f --prune %s refs/*:refs/*" % (ud.basecmd, repourl)
+            if ud.proto.lower() != 'file':
+                bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
+            runfetchcmd(fetch_cmd, d)
+            runfetchcmd("%s prune-packed" % ud.basecmd, d)
+            runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
+            try:
+                os.unlink(ud.fullmirror)
+            except OSError as exc:
+                if exc.errno != errno.ENOENT:
+                    raise
+        os.chdir(ud.clonedir)
+        for name in ud.names:
+            if not self._contains_ref(ud, d, name):
+                raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revisions[name], ud.branches[name]))
+
+    def build_mirror_data(self, ud, d):
+        # Generate a mirror tarball if needed
+        if ud.write_tarballs and not os.path.exists(ud.fullmirror):
+            # it's possible that this symlink points to a read-only filesystem with PREMIRRORS
+            if os.path.islink(ud.fullmirror):
+                os.unlink(ud.fullmirror)
+
+            os.chdir(ud.clonedir)
+            logger.info("Creating tarball of git repository")
+            runfetchcmd("tar -czf %s %s" % (ud.fullmirror, os.path.join(".") ), d)
+            runfetchcmd("touch %s.done" % (ud.fullmirror), d)
+
+    def unpack(self, ud, destdir, d):
+        """ unpack the downloaded src to destdir"""
+
+        subdir = ud.parm.get("subpath", "")
+        if subdir != "":
+            readpathspec = ":%s" % (subdir)
+            def_destsuffix = "%s/" % os.path.basename(subdir.rstrip('/'))
+        else:
+            readpathspec = ""
+            def_destsuffix = "git/"
+
+        destsuffix = ud.parm.get("destsuffix", def_destsuffix)
+        destdir = ud.destdir = os.path.join(destdir, destsuffix)
+        if os.path.exists(destdir):
+            bb.utils.prunedir(destdir)
+
+        cloneflags = "-s -n"
+        if ud.bareclone:
+            cloneflags += " --mirror"
+
+        # Versions of git prior to 1.7.9.2 have issues where foo.git and foo get confused
+        # and you end up with some horrible union of the two when you attempt to clone it
+        # The least invasive workaround seems to be a symlink to the real directory to
+        # fool git into ignoring any .git version that may also be present.
+        #
+        # The issue is fixed in more recent versions of git so we can drop this hack in future
+        # when that version becomes common enough.
+        clonedir = ud.clonedir
+        if not ud.path.endswith(".git"):
+            indirectiondir = destdir[:-1] + ".indirectionsymlink"
+            if os.path.exists(indirectiondir):
+                os.remove(indirectiondir)
+            bb.utils.mkdirhier(os.path.dirname(indirectiondir))
+            os.symlink(ud.clonedir, indirectiondir)
+            clonedir = indirectiondir
+
+        runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, cloneflags, clonedir, destdir), d)
+        os.chdir(destdir)
+        repourl = self._get_repo_url(ud)
+        runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, repourl), d)
+        if not ud.nocheckout:
+            if subdir != "":
+                runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d)
+                runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d)
+            elif not ud.nobranch:
+                branchname =  ud.branches[ud.names[0]]
+                runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \
+                            ud.revisions[ud.names[0]]), d)
+                runfetchcmd("%s branch --set-upstream %s origin/%s" % (ud.basecmd, branchname, \
+                            branchname), d)
+            else:
+                runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d)
+
+        return True
+
+    def clean(self, ud, d):
+        """ clean the git directory """
+
+        bb.utils.remove(ud.localpath, True)
+        bb.utils.remove(ud.fullmirror)
+        bb.utils.remove(ud.fullmirror + ".done")
+
+    def supports_srcrev(self):
+        return True
+
+    def _contains_ref(self, ud, d, name):
+        cmd = ""
+        if ud.nobranch:
+            cmd = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (
+                ud.basecmd, ud.revisions[name])
+        else:
+            cmd =  "%s branch --contains %s --list %s 2> /dev/null | wc -l" % (
+                ud.basecmd, ud.revisions[name], ud.branches[name])
+        try:
+            output = runfetchcmd(cmd, d, quiet=True)
+        except bb.fetch2.FetchError:
+            return False
+        if len(output.split()) > 1:
+            raise bb.fetch2.FetchError("The command '%s' gave output with more then 1 line unexpectedly, output: '%s'" % (cmd, output))
+        return output.split()[0] != "0"
+
+    def _get_repo_url(self, ud):
+        """
+        Return the repository URL
+        """
+        if ud.user:
+            username = ud.user + '@'
+        else:
+            username = ""
+        return "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path)
+
+    def _revision_key(self, ud, d, name):
+        """
+        Return a unique key for the url
+        """
+        return "git:" + ud.host + ud.path.replace('/', '.') + ud.unresolvedrev[name]
+
+    def _lsremote(self, ud, d, search):
+        """
+        Run git ls-remote with the specified search string
+        """
+        repourl = self._get_repo_url(ud)
+        cmd = "%s ls-remote %s %s" % \
+              (ud.basecmd, repourl, search)
+        if ud.proto.lower() != 'file':
+            bb.fetch2.check_network_access(d, cmd)
+        output = runfetchcmd(cmd, d, True)
+        if not output:
+            raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, ud.url)
+        return output
+
+    def _latest_revision(self, ud, d, name):
+        """
+        Compute the HEAD revision for the url
+        """
+        output = self._lsremote(ud, d, "")
+        # Tags of the form ^{} may not work, need to fall back to the other form
+        if ud.unresolvedrev[name][:5] == "refs/":
+            head = ud.unresolvedrev[name]
+            tag = ud.unresolvedrev[name]
+        else:
+            head = "refs/heads/%s" % ud.unresolvedrev[name]
+            tag = "refs/tags/%s" % ud.unresolvedrev[name]
+        for s in [head, tag + "^{}", tag]:
+            for l in output.split('\n'):
+                if s in l:
+                    return l.split()[0]
+        raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output for %s" % \
+            (ud.unresolvedrev[name], ud.host+ud.path))
+
+    def latest_versionstring(self, ud, d):
+        """
+        Compute the latest release name like "x.y.z" in "x.y.z+gitHASH"
+        by searching through the tags output of ls-remote, comparing
+        versions and returning the highest match.
+        """
+        pupver = ('', '')
+
+        tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX', True) or "(?P<pver>([0-9][\.|_]?)+)")
+        try:
+            output = self._lsremote(ud, d, "refs/tags/*")
+        except (bb.fetch2.FetchError, bb.fetch2.NetworkAccess):
+            return pupver
+
+        verstring = ""
+        revision = ""
+        for line in output.split("\n"):
+            if not line:
+                break
+
+            tag_head = line.split("/")[-1]
+            # Ignore pre-release tags
+            m = re.search("(alpha|beta|rc|final)+", tag_head)
+            if m:
+                continue
+
+            # search for version in the line
+            tag = tagregex.search(tag_head)
+            if tag is None:
+                continue
+
+            tag = tag.group('pver')
+            tag = tag.replace("_", ".")
+
+            if verstring and bb.utils.vercmp(("0", tag, ""), ("0", verstring, "")) < 0:
+                continue
+
+            verstring = tag
+            revision = line.split()[0]
+            pupver = (verstring, revision)
+
+        return pupver
+
+    def _build_revision(self, ud, d, name):
+        return ud.revisions[name]
+
+    def gitpkgv_revision(self, ud, d, name):
+        """
+        Return a sortable revision number by counting commits in the history
+        Based on gitpkgv.bbclass in meta-openembedded
+        """
+        rev = self._build_revision(ud, d, name)
+        localpath = ud.localpath
+        rev_file = os.path.join(localpath, "oe-gitpkgv_" + rev)
+        if not os.path.exists(localpath):
+            commits = None
+        else:
+            if not os.path.exists(rev_file) or not os.path.getsize(rev_file):
+                from pipes import quote
+                commits = bb.fetch2.runfetchcmd(
+                        "git rev-list %s -- | wc -l" % (quote(rev)),
+                        d, quiet=True).strip().lstrip('0')
+                if commits:
+                    open(rev_file, "w").write("%d\n" % int(commits))
+            else:
+                commits = open(rev_file, "r").readline(128).strip()
+        if commits:
+            return False, "%s+%s" % (commits, rev[:7])
+        else:
+            return True, str(rev)
+
+    def checkstatus(self, fetch, ud, d):
+        try:
+            self._lsremote(ud, d, "")
+            return True
+        except bb.fetch2.FetchError:
+            return False
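
A standalone sketch of the tag filtering performed by latest_versionstring() above, using invented ls-remote lines and the default UPSTREAM_CHECK_GITTAGREGEX pattern (everything below is illustrative, not part of the fetcher):

    import re

    tagregex = re.compile(r"(?P<pver>([0-9][\.|_]?)+)")   # default pattern used above
    for line in ["aaa111\trefs/tags/v1.2.0", "bbb222\trefs/tags/v1.10.0-rc1"]:
        tag_head = line.split("/")[-1]
        if re.search("(alpha|beta|rc|final)+", tag_head):
            continue                                      # pre-release tag, skipped
        m = tagregex.search(tag_head)
        if m:
            print(m.group("pver").replace("_", "."))      # candidate version: 1.2.0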

+ 76 - 0
bitbake/lib/bb/fetch2/gitannex.py

@@ -0,0 +1,76 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' git annex implementation
+"""
+
+# Copyright (C) 2014 Otavio Salvador
+# Copyright (C) 2014 O.S. Systems Software LTDA.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import bb
+from   bb import data
+from   bb.fetch2.git import Git
+from   bb.fetch2 import runfetchcmd
+from   bb.fetch2 import logger
+
+class GitANNEX(Git):
+    def supports(self, ud, d):
+        """
+        Check to see if a given url can be fetched with git.
+        """
+        return ud.type in ['gitannex']
+
+    def uses_annex(self, ud, d):
+        for name in ud.names:
+            try:
+                runfetchcmd("%s rev-list git-annex" % (ud.basecmd), d, quiet=True)
+                return True
+            except bb.fetch2.FetchError:
+                pass
+
+        return False
+
+    def update_annex(self, ud, d):
+        try:
+            runfetchcmd("%s annex get --all" % (ud.basecmd), d, quiet=True)
+        except bb.fetch2.FetchError:
+            return False
+        runfetchcmd("chmod u+w -R %s/annex" % (ud.clonedir), d, quiet=True)
+
+        return True
+
+    def download(self, ud, d):
+        Git.download(self, ud, d)
+
+        os.chdir(ud.clonedir)
+        annex = self.uses_annex(ud, d)
+        if annex:
+            self.update_annex(ud, d)
+
+    def unpack(self, ud, destdir, d):
+        Git.unpack(self, ud, destdir, d)
+
+        os.chdir(ud.destdir)
+        try:
+            runfetchcmd("%s annex sync" % (ud.basecmd), d)
+        except bb.fetch2.FetchError:
+            pass
+
+        annex = self.uses_annex(ud, d)
+        if annex:
+            runfetchcmd("%s annex get" % (ud.basecmd), d)
+            runfetchcmd("chmod u+w -R %s/.git/annex" % (ud.destdir), d, quiet=True)

+ 137 - 0
bitbake/lib/bb/fetch2/gitsm.py

@@ -0,0 +1,137 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' git submodules implementation
+
+Inherits from and extends the Git fetcher to retrieve submodules of a git repository
+after cloning.
+
+SRC_URI = "gitsm://<see Git fetcher for syntax>"
+
+See the Git fetcher, git://, for usage documentation.
+
+NOTE: Switching a SRC_URI from "git://" to "gitsm://" requires a clean of your recipe.
+
+"""
+
+# Copyright (C) 2013 Richard Purdie
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import bb
+from   bb    import data
+from   bb.fetch2.git import Git
+from   bb.fetch2 import runfetchcmd
+from   bb.fetch2 import logger
+
+class GitSM(Git):
+    def supports(self, ud, d):
+        """
+        Check to see if a given url can be fetched with git.
+        """
+        return ud.type in ['gitsm']
+
+    def uses_submodules(self, ud, d):
+        for name in ud.names:
+            try:
+                runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True)
+                return True
+            except bb.fetch2.FetchError:
+                pass
+        return False
+
+    def _set_relative_paths(self, repopath):
+        """
+        Fix submodule paths to be relative instead of absolute,
+        so that when we move the repo it doesn't break
+        (In Git 1.7.10+ this is done automatically)
+        """
+        submodules = []
+        with open(os.path.join(repopath, '.gitmodules'), 'r') as f:
+            for line in f.readlines():
+                if line.startswith('[submodule'):
+                    submodules.append(line.split('"')[1])
+
+        for module in submodules:
+            repo_conf = os.path.join(repopath, module, '.git')
+            if os.path.exists(repo_conf):
+                with open(repo_conf, 'r') as f:
+                    lines = f.readlines()
+                newpath = ''
+                for i, line in enumerate(lines):
+                    if line.startswith('gitdir:'):
+                        oldpath = line.split(': ')[-1].rstrip()
+                        if oldpath.startswith('/'):
+                            newpath = '../' * (module.count('/') + 1) + '.git/modules/' + module
+                            lines[i] = 'gitdir: %s\n' % newpath
+                            break
+                if newpath:
+                    with open(repo_conf, 'w') as f:
+                        for line in lines:
+                            f.write(line)
+
+            repo_conf2 = os.path.join(repopath, '.git', 'modules', module, 'config')
+            if os.path.exists(repo_conf2):
+                with open(repo_conf2, 'r') as f:
+                    lines = f.readlines()
+                newpath = ''
+                for i, line in enumerate(lines):
+                    if line.lstrip().startswith('worktree = '):
+                        oldpath = line.split(' = ')[-1].rstrip()
+                        if oldpath.startswith('/'):
+                            newpath = '../' * (module.count('/') + 3) + module
+                            lines[i] = '\tworktree = %s\n' % newpath
+                            break
+                if newpath:
+                    with open(repo_conf2, 'w') as f:
+                        for line in lines:
+                            f.write(line)
+
+    def update_submodules(self, ud, d):
+        # We have to convert bare -> full repo, do the submodule bit, then convert back
+        tmpclonedir = ud.clonedir + ".tmp"
+        gitdir = tmpclonedir + os.sep + ".git"
+        bb.utils.remove(tmpclonedir, True)
+        os.mkdir(tmpclonedir)
+        os.rename(ud.clonedir, gitdir)
+        runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*true/bare = false/'", d)
+        os.chdir(tmpclonedir)
+        runfetchcmd(ud.basecmd + " reset --hard", d)
+        runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d)
+        runfetchcmd(ud.basecmd + " submodule init", d)
+        runfetchcmd(ud.basecmd + " submodule update", d)
+        self._set_relative_paths(tmpclonedir)
+        runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*false/bare = true/'", d)
+        os.rename(gitdir, ud.clonedir)
+        bb.utils.remove(tmpclonedir, True)
+
+    def download(self, ud, d):
+        Git.download(self, ud, d)
+
+        os.chdir(ud.clonedir)
+        submodules = self.uses_submodules(ud, d)
+        if submodules:
+            self.update_submodules(ud, d)
+
+    def unpack(self, ud, destdir, d):
+        Git.unpack(self, ud, destdir, d)
+        
+        os.chdir(ud.destdir)
+        submodules = self.uses_submodules(ud, d)
+        if submodules:
+            runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d)
+            runfetchcmd(ud.basecmd + " submodule init", d)
+            runfetchcmd(ud.basecmd + " submodule update", d)
+
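
To make the gitdir/worktree rewriting in _set_relative_paths() above easier to picture, here is a small illustrative calculation (the submodule name is made up) of the relative paths it substitutes for absolute ones:

    module = "libs/foo"                 # hypothetical submodule path
    gitdir = "../" * (module.count("/") + 1) + ".git/modules/" + module
    worktree = "../" * (module.count("/") + 3) + module
    print(gitdir)    # ../../.git/modules/libs/foo
    print(worktree)  # ../../../../libs/foo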

+ 278 - 0
bitbake/lib/bb/fetch2/hg.py

@@ -0,0 +1,278 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementation for mercurial DRCS (hg).
+
+"""
+
+# Copyright (C) 2003, 2004  Chris Larson
+# Copyright (C) 2004        Marcin Juszkiewicz
+# Copyright (C) 2007        Robert Schuster
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import os
+import sys
+import logging
+import bb
+import errno
+from bb import data
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import FetchError
+from bb.fetch2 import MissingParameterError
+from bb.fetch2 import runfetchcmd
+from bb.fetch2 import logger
+
+class Hg(FetchMethod):
+    """Class to fetch from mercurial repositories"""
+    def supports(self, ud, d):
+        """
+        Check to see if a given url can be fetched with mercurial.
+        """
+        return ud.type in ['hg']
+
+    def supports_checksum(self, urldata):
+        """
+        Don't require checksums for local archives created from
+        repository checkouts.
+        """ 
+        return False
+
+    def urldata_init(self, ud, d):
+        """
+        init hg specific variable within url data
+        """
+        if not "module" in ud.parm:
+            raise MissingParameterError('module', ud.url)
+
+        ud.module = ud.parm["module"]
+
+        if 'protocol' in ud.parm:
+            ud.proto = ud.parm['protocol']
+        elif not ud.host:
+            ud.proto = 'file'
+        else:
+            ud.proto = "hg"
+
+        ud.setup_revisons(d)
+
+        if 'rev' in ud.parm:
+            ud.revision = ud.parm['rev']
+        elif not ud.revision:
+            ud.revision = self.latest_revision(ud, d)
+
+        # Create paths to mercurial checkouts
+        hgsrcname = '%s_%s_%s' % (ud.module.replace('/', '.'), \
+                            ud.host, ud.path.replace('/', '.'))
+        ud.mirrortarball = 'hg_%s.tar.gz' % hgsrcname
+        ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)
+
+        hgdir = d.getVar("HGDIR", True) or (d.getVar("DL_DIR", True) + "/hg/")
+        ud.pkgdir = os.path.join(hgdir, hgsrcname)
+        ud.moddir = os.path.join(ud.pkgdir, ud.module)
+        ud.localfile = ud.moddir
+        ud.basecmd = data.getVar("FETCHCMD_hg", d, True) or "/usr/bin/env hg"
+
+        ud.write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS", True)
+
+    def need_update(self, ud, d):
+        revTag = ud.parm.get('rev', 'tip')
+        if revTag == "tip":
+            return True
+        if not os.path.exists(ud.localpath):
+            return True
+        return False
+
+    def try_premirror(self, ud, d):
+        # If we don't do this, updating an existing checkout with only premirrors
+        # is not possible
+        if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None:
+            return True
+        if os.path.exists(ud.moddir):
+            return False
+        return True
+
+    def _buildhgcommand(self, ud, d, command):
+        """
+        Build up an hg commandline based on ud
+        command is "fetch", "update", "info"
+        """
+
+        proto = ud.parm.get('protocol', 'http')
+
+        host = ud.host
+        if proto == "file":
+            host = "/"
+            ud.host = "localhost"
+
+        if not ud.user:
+            hgroot = host + ud.path
+        else:
+            if ud.pswd:
+                hgroot = ud.user + ":" + ud.pswd + "@" + host + ud.path
+            else:
+                hgroot = ud.user + "@" + host + ud.path
+
+        if command == "info":
+            return "%s identify -i %s://%s/%s" % (ud.basecmd, proto, hgroot, ud.module)
+
+        options = []
+
+        # Don't specify revision for the fetch; clone the entire repo.
+        # This avoids an issue if the specified revision is a tag, because
+        # the tag actually exists in the specified revision + 1, so it won't
+        # be available when used in any successive commands.
+        if ud.revision and command != "fetch":
+            options.append("-r %s" % ud.revision)
+
+        if command == "fetch":
+            if ud.user and ud.pswd:
+                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" clone %s %s://%s/%s %s" % (ud.basecmd, ud.user, ud.pswd, proto, " ".join(options), proto, hgroot, ud.module, ud.module)
+            else:
+                cmd = "%s clone %s %s://%s/%s %s" % (ud.basecmd, " ".join(options), proto, hgroot, ud.module, ud.module)
+        elif command == "pull":
+            # do not pass options list; limiting pull to rev causes the local
+            # repo not to contain it and immediately following "update" command
+            # will crash
+            if ud.user and ud.pswd:
+                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" pull" % (ud.basecmd, ud.user, ud.pswd, proto)
+            else:
+                cmd = "%s pull" % (ud.basecmd)
+        elif command == "update":
+            if ud.user and ud.pswd:
+                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" update -C %s" % (ud.basecmd, ud.user, ud.pswd, proto, " ".join(options))
+            else:
+                cmd = "%s update -C %s" % (ud.basecmd, " ".join(options))
+        else:
+            raise FetchError("Invalid hg command %s" % command, ud.url)
+
+        return cmd
+
+    def download(self, ud, d):
+        """Fetch url"""
+
+        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
+
+        # If the checkout doesn't exist and the mirror tarball does, extract it
+        if not os.path.exists(ud.pkgdir) and os.path.exists(ud.fullmirror):
+            bb.utils.mkdirhier(ud.pkgdir)
+            os.chdir(ud.pkgdir)
+            runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)
+
+        if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
+            # Found the source, check whether need pull
+            updatecmd = self._buildhgcommand(ud, d, "update")
+            os.chdir(ud.moddir)
+            logger.debug(1, "Running %s", updatecmd)
+            try:
+                runfetchcmd(updatecmd, d)
+            except bb.fetch2.FetchError:
+                # Running pull in the repo
+                pullcmd = self._buildhgcommand(ud, d, "pull")
+                logger.info("Pulling " + ud.url)
+                # update sources there
+                os.chdir(ud.moddir)
+                logger.debug(1, "Running %s", pullcmd)
+                bb.fetch2.check_network_access(d, pullcmd, ud.url)
+                runfetchcmd(pullcmd, d)
+                try:
+                    os.unlink(ud.fullmirror)
+                except OSError as exc:
+                    if exc.errno != errno.ENOENT:
+                        raise
+
+        # No source found, clone it.
+        if not os.path.exists(ud.moddir):
+            fetchcmd = self._buildhgcommand(ud, d, "fetch")
+            logger.info("Fetch " + ud.url)
+            # check out sources there
+            bb.utils.mkdirhier(ud.pkgdir)
+            os.chdir(ud.pkgdir)
+            logger.debug(1, "Running %s", fetchcmd)
+            bb.fetch2.check_network_access(d, fetchcmd, ud.url)
+            runfetchcmd(fetchcmd, d)
+
+        # Even when we clone (fetch), we still need to update as hg's clone
+        # won't check out the specified revision if it's on a branch
+        updatecmd = self._buildhgcommand(ud, d, "update")
+        os.chdir(ud.moddir)
+        logger.debug(1, "Running %s", updatecmd)
+        runfetchcmd(updatecmd, d)
+
+    def clean(self, ud, d):
+        """ Clean the hg dir """
+
+        bb.utils.remove(ud.localpath, True)
+        bb.utils.remove(ud.fullmirror)
+        bb.utils.remove(ud.fullmirror + ".done")
+
+    def supports_srcrev(self):
+        return True
+
+    def _latest_revision(self, ud, d, name):
+        """
+        Compute tip revision for the url
+        """
+        bb.fetch2.check_network_access(d, self._buildhgcommand(ud, d, "info"))
+        output = runfetchcmd(self._buildhgcommand(ud, d, "info"), d)
+        return output.strip()
+
+    def _build_revision(self, ud, d, name):
+        return ud.revision
+
+    def _revision_key(self, ud, d, name):
+        """
+        Return a unique key for the url
+        """
+        return "hg:" + ud.moddir
+
+    def build_mirror_data(self, ud, d):
+        # Generate a mirror tarball if needed
+        if ud.write_tarballs == "1" and not os.path.exists(ud.fullmirror):
+            # it's possible that this symlink points to read-only filesystem with PREMIRROR
+            if os.path.islink(ud.fullmirror):
+                os.unlink(ud.fullmirror)
+
+            os.chdir(ud.pkgdir)
+            logger.info("Creating tarball of hg repository")
+            runfetchcmd("tar -czf %s %s" % (ud.fullmirror, ud.module), d)
+            runfetchcmd("touch %s.done" % (ud.fullmirror), d)
+
+    def localpath(self, ud, d):
+        return ud.pkgdir
+
+    def unpack(self, ud, destdir, d):
+        """
+        Make a local clone or export for the url
+        """
+
+        revflag = "-r %s" % ud.revision
+        subdir = ud.parm.get("destsuffix", ud.module)
+        codir = "%s/%s" % (destdir, subdir)
+
+        scmdata = ud.parm.get("scmdata", "")
+        if scmdata != "nokeep":
+            if not os.access(os.path.join(codir, '.hg'), os.R_OK):
+                logger.debug(2, "Unpack: creating new hg repository in '" + codir + "'")
+                runfetchcmd("%s init %s" % (ud.basecmd, codir), d)
+            logger.debug(2, "Unpack: updating source in '" + codir + "'")
+            os.chdir(codir)
+            runfetchcmd("%s pull %s" % (ud.basecmd, ud.moddir), d)
+            runfetchcmd("%s up -C %s" % (ud.basecmd, revflag), d)
+        else:
+            logger.debug(2, "Unpack: extracting source to '" + codir + "'")
+            os.chdir(ud.moddir)
+            runfetchcmd("%s archive -t files %s %s" % (ud.basecmd, revflag, codir), d)

+ 129 - 0
bitbake/lib/bb/fetch2/local.py

@@ -0,0 +1,129 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementations
+
+Classes for obtaining upstream sources for the
+BitBake build tools.
+
+"""
+
+# Copyright (C) 2003, 2004  Chris Larson
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import os
+import urllib
+import bb
+import bb.utils
+from   bb import data
+from   bb.fetch2 import FetchMethod, FetchError
+from   bb.fetch2 import logger
+
+class Local(FetchMethod):
+    def supports(self, urldata, d):
+        """
+        Check to see if a given url represents a local fetch.
+        """
+        return urldata.type in ['file']
+
+    def urldata_init(self, ud, d):
+        # We don't set localfile as for this fetcher the file is already local!
+        ud.decodedurl = urllib.unquote(ud.url.split("://")[1].split(";")[0])
+        ud.basename = os.path.basename(ud.decodedurl)
+        ud.basepath = ud.decodedurl
+        ud.needdonestamp = False
+        return
+
+    def localpath(self, urldata, d):
+        """
+        Return the local filename of a given url assuming a successful fetch.
+        """
+        return self.localpaths(urldata, d)[-1]
+
+    def localpaths(self, urldata, d):
+        """
+        Return the local filename of a given url assuming a successful fetch.
+        """
+        searched = []
+        path = urldata.decodedurl
+        newpath = path
+        if path[0] == "/":
+            return [path]
+        filespath = data.getVar('FILESPATH', d, True)
+        if filespath:
+            logger.debug(2, "Searching for %s in paths:\n    %s" % (path, "\n    ".join(filespath.split(":"))))
+            newpath, hist = bb.utils.which(filespath, path, history=True)
+            searched.extend(hist)
+        if not newpath:
+            filesdir = data.getVar('FILESDIR', d, True)
+            if filesdir:
+                logger.debug(2, "Searching for %s in path: %s" % (path, filesdir))
+                newpath = os.path.join(filesdir, path)
+                searched.append(newpath)
+        if (not newpath or not os.path.exists(newpath)) and path.find("*") != -1:
+            # For expressions using '*', best we can do is take the first directory in FILESPATH that exists
+            newpath, hist = bb.utils.which(filespath, ".", history=True)
+            searched.extend(hist)
+            logger.debug(2, "Searching for %s in path: %s" % (path, newpath))
+            return searched
+        if not os.path.exists(newpath):
+            dldirfile = os.path.join(d.getVar("DL_DIR", True), path)
+            logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
+            bb.utils.mkdirhier(os.path.dirname(dldirfile))
+            searched.append(dldirfile)
+            return searched
+        return searched
+
+    def need_update(self, ud, d):
+        if ud.url.find("*") != -1:
+            return False
+        if os.path.exists(ud.localpath):
+            return False
+        return True
+
+    def download(self, urldata, d):
+        """Fetch urls (no-op for Local method)"""
+        # no need to fetch local files, we'll deal with them in place.
+        if self.supports_checksum(urldata) and not os.path.exists(urldata.localpath):
+            locations = []
+            filespath = data.getVar('FILESPATH', d, True)
+            if filespath:
+                locations = filespath.split(":")
+            filesdir = data.getVar('FILESDIR', d, True)
+            if filesdir:
+                locations.append(filesdir)
+            locations.append(d.getVar("DL_DIR", True))
+
+            msg = "Unable to find file " + urldata.url + " anywhere. The paths that were searched were:\n    " + "\n    ".join(locations)
+            raise FetchError(msg)
+
+        return True
+
+    def checkstatus(self, fetch, urldata, d):
+        """
+        Check the status of the url
+        """
+        if urldata.localpath.find("*") != -1:
+            logger.info("URL %s looks like a glob and was therefore not checked.", urldata.url)
+            return True
+        if os.path.exists(urldata.localpath):
+            return True
+        return False
+
+    def clean(self, urldata, d):
+        return
+

+ 284 - 0
bitbake/lib/bb/fetch2/npm.py

@@ -0,0 +1,284 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' NPM implementation
+
+The NPM fetcher is used to retrieve files from the npmjs repository
+
+Usage in the recipe:
+
+    SRC_URI = "npm://registry.npmjs.org/;name=${PN};version=${PV}"
+    Supported SRC_URI options are:
+
+    - name
+    - version
+
+    npm://registry.npmjs.org/${PN}/-/${PN}-${PV}.tgz  would become npm://registry.npmjs.org;name=${PN};version=${PV}
+    The fetcher triggers off the existence of ud.localpath. If that exists and has the ".done" stamp, it is assumed the fetch is good/done.
+
+"""
+
+import os
+import sys
+import urllib
+import json
+import subprocess
+import signal
+import bb
+from   bb import data
+from   bb.fetch2 import FetchMethod
+from   bb.fetch2 import FetchError
+from   bb.fetch2 import ChecksumError
+from   bb.fetch2 import runfetchcmd
+from   bb.fetch2 import logger
+from   bb.fetch2 import UnpackError
+from   bb.fetch2 import ParameterError
+from   distutils import spawn
+
+def subprocess_setup():
+    # Python installs a SIGPIPE handler by default. This is usually not what
+    # non-Python subprocesses expect.
+    # SIGPIPE errors are known issues with gzip/bash
+    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
+
+class Npm(FetchMethod):
+
+    """Class to fetch urls via 'npm'"""
+    def init(self, d):
+        pass
+
+    def supports(self, ud, d):
+        """
+        Check to see if a given url can be fetched with npm
+        """
+        return ud.type in ['npm']
+
+    def debug(self, msg):
+        logger.debug(1, "NpmFetch: %s", msg)
+
+    def clean(self, ud, d):
+        logger.debug(2, "Calling cleanup %s" % ud.pkgname)
+        bb.utils.remove(ud.localpath, False)
+        bb.utils.remove(ud.pkgdatadir, True)
+        bb.utils.remove(ud.fullmirror, False)
+
+    def urldata_init(self, ud, d):
+        """
+        init NPM specific variable within url data
+        """
+        if 'downloadfilename' in ud.parm:
+            ud.basename = ud.parm['downloadfilename']
+        else:
+            ud.basename = os.path.basename(ud.path)
+
+        # can't call it ud.name otherwise fetcher base class will start doing sha1stuff
+        # TODO: find a way to get an sha1/sha256 manifest of pkg & all deps
+        ud.pkgname = ud.parm.get("name", None)
+        if not ud.pkgname:
+            raise ParameterError("NPM fetcher requires a name parameter", ud.url)
+        ud.version = ud.parm.get("version", None)
+        if not ud.version:
+            raise ParameterError("NPM fetcher requires a version parameter", ud.url)
+        ud.bbnpmmanifest = "%s-%s.deps.json" % (ud.pkgname, ud.version)
+        ud.registry = "http://%s" % (ud.url.replace('npm://', '', 1).split(';'))[0]
+        prefixdir = "npm/%s" % ud.pkgname
+        ud.pkgdatadir = d.expand("${DL_DIR}/%s" % prefixdir)
+        if not os.path.exists(ud.pkgdatadir):
+            bb.utils.mkdirhier(ud.pkgdatadir)
+        ud.localpath = d.expand("${DL_DIR}/npm/%s" % ud.bbnpmmanifest)
+
+        self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -O -t 2 -T 30 -nv --passive-ftp --no-check-certificate "
+        self.basecmd += " --directory-prefix=%s " % prefixdir
+
+        ud.write_tarballs = ((data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) or "0") != "0")
+        ud.mirrortarball = 'npm_%s-%s.tar.xz' % (ud.pkgname, ud.version)
+        ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)
+
+    def need_update(self, ud, d):
+        if os.path.exists(ud.localpath):
+            return False
+        return True
+
+    def _runwget(self, ud, d, command, quiet):
+        logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
+        bb.fetch2.check_network_access(d, command)
+        runfetchcmd(command, d, quiet)
+
+    def _unpackdep(self, ud, pkg, data, destdir, dldir, d):
+        file = data[pkg]['tgz']
+        logger.debug(2, "file to extract is %s" % file)
+        if file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
+            cmd = 'tar xz --strip 1 --no-same-owner --warning=no-unknown-keyword -f %s/%s' % (dldir, file)
+        else:
+            bb.fatal("NPM package %s downloaded not a tarball!" % file)
+
+        # Change to subdir before executing command
+        save_cwd = os.getcwd()
+        if not os.path.exists(destdir):
+            os.makedirs(destdir)
+        os.chdir(destdir)
+        path = d.getVar('PATH', True)
+        if path:
+            cmd = "PATH=\"%s\" %s" % (path, cmd)
+        bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
+        ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
+        os.chdir(save_cwd)
+
+        if ret != 0:
+            raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), ud.url)
+
+        if 'deps' not in data[pkg]:
+            return
+        for dep in data[pkg]['deps']:
+            self._unpackdep(ud, dep, data[pkg]['deps'], "%s/node_modules/%s" % (destdir, dep), dldir, d)
+
+
+    def unpack(self, ud, destdir, d):
+        dldir = d.getVar("DL_DIR", True)
+        depdumpfile = "%s-%s.deps.json" % (ud.pkgname, ud.version)
+        with open("%s/npm/%s" % (dldir, depdumpfile)) as datafile:
+            workobj = json.load(datafile)
+        dldir = "%s/%s" % (os.path.dirname(ud.localpath), ud.pkgname)
+
+        self._unpackdep(ud, ud.pkgname, workobj,  "%s/npmpkg" % destdir, dldir, d)
+
+    def _parse_view(self, output):
+        '''
+        Parse the output of npm view --json; the last JSON result
+        is assumed to be the one that we're interested in.
+        '''
+        pdata = None
+        outdeps = {}
+        datalines = []
+        bracelevel = 0
+        for line in output.splitlines():
+            if bracelevel:
+                datalines.append(line)
+            elif '{' in line:
+                datalines = []
+                datalines.append(line)
+            bracelevel = bracelevel + line.count('{') - line.count('}')
+        if datalines:
+            pdata = json.loads('\n'.join(datalines))
+        return pdata
+
+    def _getdependencies(self, pkg, data, version, d, ud, optional=False):
+        pkgfullname = pkg
+        if version != '*' and not '/' in version:
+            pkgfullname += "@'%s'" % version
+        logger.debug(2, "Calling getdeps on %s" % pkg)
+        fetchcmd = "npm view %s --json --registry %s" % (pkgfullname, ud.registry)
+        output = runfetchcmd(fetchcmd, d, True)
+        pdata = self._parse_view(output)
+        if not pdata:
+            raise FetchError("The command '%s' returned no output" % fetchcmd)
+        if optional:
+            pkg_os = pdata.get('os', None)
+            if pkg_os:
+                if not isinstance(pkg_os, list):
+                    pkg_os = [pkg_os]
+                if 'linux' not in pkg_os or '!linux' in pkg_os:
+                    logger.debug(2, "Skipping %s since it's incompatible with Linux" % pkg)
+                    return
+        #logger.debug(2, "Output URL is %s - %s - %s" % (ud.basepath, ud.basename, ud.localfile))
+        outputurl = pdata['dist']['tarball']
+        data[pkg] = {}
+        data[pkg]['tgz'] = os.path.basename(outputurl)
+        self._runwget(ud, d, "%s %s" % (self.basecmd, outputurl), False)
+
+        dependencies = pdata.get('dependencies', {})
+        optionalDependencies = pdata.get('optionalDependencies', {})
+        depsfound = {}
+        optdepsfound = {}
+        data[pkg]['deps'] = {}
+        for dep in dependencies:
+            if dep in optionalDependencies:
+                optdepsfound[dep] = dependencies[dep]
+            else:
+                depsfound[dep] = dependencies[dep]
+        for dep, version in optdepsfound.iteritems():
+            self._getdependencies(dep, data[pkg]['deps'], version, d, ud, optional=True)
+        for dep, version in depsfound.iteritems():
+            self._getdependencies(dep, data[pkg]['deps'], version, d, ud)
+
+    def _getshrinkeddependencies(self, pkg, data, version, d, ud, lockdown, manifest):
+        logger.debug(2, "NPM shrinkwrap file is %s" % data)
+        outputurl = "invalid"
+        if ('resolved' not in data) or (not data['resolved'].startswith('http')):
+            # will be the case for ${PN}
+            fetchcmd = "npm view %s@%s dist.tarball --registry %s" % (pkg, version, ud.registry)
+            logger.debug(2, "Found this matching URL: %s" % str(fetchcmd))
+            outputurl = runfetchcmd(fetchcmd, d, True)
+        else:
+            outputurl = data['resolved']
+        self._runwget(ud, d, "%s %s" % (self.basecmd, outputurl), False)
+        manifest[pkg] = {}
+        manifest[pkg]['tgz'] = os.path.basename(outputurl).rstrip()
+        manifest[pkg]['deps'] = {}
+
+        if pkg in lockdown:
+            sha1_expected = lockdown[pkg][version]
+            sha1_data = bb.utils.sha1_file("npm/%s/%s" % (ud.pkgname, manifest[pkg]['tgz']))
+            if sha1_expected != sha1_data:
+                msg = "\nFile: '%s' has %s checksum %s when %s was expected" % (manifest[pkg]['tgz'], 'sha1', sha1_data, sha1_expected)
+                raise ChecksumError('Checksum mismatch!%s' % msg)
+        else:
+            logger.debug(2, "No lockdown data for %s@%s" % (pkg, version))
+
+        if 'dependencies' in data:
+            for obj in data['dependencies']:
+                logger.debug(2, "Found dep is %s" % str(obj))
+                self._getshrinkeddependencies(obj, data['dependencies'][obj], data['dependencies'][obj]['version'], d, ud, lockdown, manifest[pkg]['deps'])
+
+    def download(self, ud, d):
+        """Fetch url"""
+        jsondepobj = {}
+        shrinkobj = {}
+        lockdown = {}
+
+        if not os.listdir(ud.pkgdatadir) and os.path.exists(ud.fullmirror):
+            dest = d.getVar("DL_DIR", True)
+            bb.utils.mkdirhier(dest)
+            save_cwd = os.getcwd()
+            os.chdir(dest)
+            runfetchcmd("tar -xJf %s" % (ud.fullmirror), d)
+            os.chdir(save_cwd)
+            return
+
+        shwrf = d.getVar('NPM_SHRINKWRAP', True)
+        logger.debug(2, "NPM shrinkwrap file is %s" % shwrf)
+        try:
+            with open(shwrf) as datafile:
+                shrinkobj = json.load(datafile)
+        except:
+            logger.warn('Missing shrinkwrap file in NPM_SHRINKWRAP for %s, this will lead to unreliable builds!' % ud.pkgname)
+        lckdf = d.getVar('NPM_LOCKDOWN', True)
+        logger.debug(2, "NPM lockdown file is %s" % lckdf)
+        try:
+            with open(lckdf) as datafile:
+                lockdown = json.load(datafile)
+        except:
+            logger.warn('Missing lockdown file in NPM_LOCKDOWN for %s, this will lead to unreproducible builds!' % ud.pkgname)
+
+        if ('name' not in shrinkobj):
+            self._getdependencies(ud.pkgname, jsondepobj, ud.version, d, ud)
+        else:
+            self._getshrinkeddependencies(ud.pkgname, shrinkobj, ud.version, d, ud, lockdown, jsondepobj)
+
+        with open(ud.localpath, 'w') as outfile:
+            json.dump(jsondepobj, outfile)
+
+    def build_mirror_data(self, ud, d):
+        # Generate a mirror tarball if needed
+        if ud.write_tarballs and not os.path.exists(ud.fullmirror):
+            # it's possible that this symlink points to read-only filesystem with PREMIRROR
+            if os.path.islink(ud.fullmirror):
+                os.unlink(ud.fullmirror)
+
+            save_cwd = os.getcwd()
+            os.chdir(d.getVar("DL_DIR", True))
+            logger.info("Creating tarball of npm data")
+            runfetchcmd("tar -cJf %s npm/%s npm/%s" % (ud.fullmirror, ud.bbnpmmanifest, ud.pkgname), d)
+            runfetchcmd("touch %s.done" % (ud.fullmirror), d)
+            os.chdir(save_cwd)
+
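
The brace counting in _parse_view() above keeps only the last complete JSON object printed by 'npm view --json'. A standalone illustration with made-up registry output:

    import json

    output = ('npm http GET https://registry.example.org/pkg\n'
              '{ "name": "pkg",\n'
              '  "dist": { "tarball": "https://registry.example.org/pkg-1.0.0.tgz" }\n'
              '}\n')
    datalines, bracelevel = [], 0
    for line in output.splitlines():
        if bracelevel:
            datalines.append(line)
        elif "{" in line:
            datalines = [line]
        bracelevel += line.count("{") - line.count("}")
    print(json.loads("\n".join(datalines))["dist"]["tarball"])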

+ 135 - 0
bitbake/lib/bb/fetch2/osc.py

@@ -0,0 +1,135 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+Bitbake "Fetch" implementation for osc (Opensuse build service client).
+Based on the svn "Fetch" implementation.
+
+"""
+
+import os
+import sys
+import logging
+import bb
+from   bb import data
+from   bb.fetch2 import FetchMethod
+from   bb.fetch2 import FetchError
+from   bb.fetch2 import MissingParameterError
+from   bb.fetch2 import runfetchcmd
+from   bb.fetch2 import logger
+
+class Osc(FetchMethod):
+    """Class to fetch a module or modules from Opensuse build server
+       repositories."""
+
+    def supports(self, ud, d):
+        """
+        Check to see if a given url can be fetched with osc.
+        """
+        return ud.type in ['osc']
+
+    def urldata_init(self, ud, d):
+        if not "module" in ud.parm:
+            raise MissingParameterError('module', ud.url)
+
+        ud.module = ud.parm["module"]
+
+        # Create paths to osc checkouts
+        relpath = self._strip_leading_slashes(ud.path)
+        ud.pkgdir = os.path.join(d.getVar('OSCDIR', True), ud.host)
+        ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)
+
+        if 'rev' in ud.parm:
+            ud.revision = ud.parm['rev']
+        else:
+            pv = d.getVar("PV", False)
+            rev = bb.fetch2.srcrev_internal_helper(ud, d)
+            if rev and rev != True:
+                ud.revision = rev
+            else:
+                ud.revision = ""
+
+        ud.localfile = data.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision), d)
+
+    def _buildosccommand(self, ud, d, command):
+        """
+        Build up an osc commandline based on ud
+        command is "fetch", "update", "info"
+        """
+
+        basecmd = data.expand('${FETCHCMD_osc}', d)
+
+        proto = ud.parm.get('protocol', 'ocs')
+
+        options = []
+
+        config = "-c %s" % self.generate_config(ud, d)
+
+        if ud.revision:
+            options.append("-r %s" % ud.revision)
+
+        coroot = self._strip_leading_slashes(ud.path)
+
+        if command == "fetch":
+            osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options))
+        elif command == "update":
+            osccmd = "%s %s up %s" % (basecmd, config, " ".join(options))
+        else:
+            raise FetchError("Invalid osc command %s" % command, ud.url)
+
+        return osccmd
+
+    def download(self, ud, d):
+        """
+        Fetch url
+        """
+
+        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
+
+        if os.access(os.path.join(d.getVar('OSCDIR', True), ud.path, ud.module), os.R_OK):
+            oscupdatecmd = self._buildosccommand(ud, d, "update")
+            logger.info("Update "+ ud.url)
+            # update sources there
+            os.chdir(ud.moddir)
+            logger.debug(1, "Running %s", oscupdatecmd)
+            bb.fetch2.check_network_access(d, oscupdatecmd, ud.url)
+            runfetchcmd(oscupdatecmd, d)
+        else:
+            oscfetchcmd = self._buildosccommand(ud, d, "fetch")
+            logger.info("Fetch " + ud.url)
+            # check out sources there
+            bb.utils.mkdirhier(ud.pkgdir)
+            os.chdir(ud.pkgdir)
+            logger.debug(1, "Running %s", oscfetchcmd)
+            bb.fetch2.check_network_access(d, oscfetchcmd, ud.url)
+            runfetchcmd(oscfetchcmd, d)
+
+        os.chdir(os.path.join(ud.pkgdir + ud.path))
+        # tar them up to a defined filename
+        runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d, cleanup = [ud.localpath])
+
+    def supports_srcrev(self):
+        return False
+
+    def generate_config(self, ud, d):
+        """
+        Generate a .oscrc to be used for this run.
+        """
+
+        config_path = os.path.join(d.getVar('OSCDIR', True), "oscrc")
+        if (os.path.exists(config_path)):
+            os.remove(config_path)
+
+        f = open(config_path, 'w')
+        f.write("[general]\n")
+        f.write("apisrv = %s\n" % ud.host)
+        f.write("scheme = http\n")
+        f.write("su-wrapper = su -c\n")
+        f.write("build-root = %s\n" % d.getVar('WORKDIR', True))
+        f.write("urllist = %s\n" % d.getVar("OSCURLLIST", True))
+        f.write("extra-pkgs = gzip\n")
+        f.write("\n")
+        f.write("[%s]\n" % ud.host)
+        f.write("user = %s\n" % ud.parm["user"])
+        f.write("pass = %s\n" % ud.parm["pswd"])
+        f.close()
+
+        return config_path
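
For reference, a hypothetical checkout command as assembled by _buildosccommand() above for command == "fetch" (all values invented):

    basecmd, config, coroot, module = "osc", "-c /downloads/osc/oscrc", "some/project", "mypkg"
    options = ["-r 42"]
    print("%s %s co %s/%s %s" % (basecmd, config, coroot, module, " ".join(options)))
    # -> osc -c /downloads/osc/oscrc co some/project/mypkg -r 42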

+ 187 - 0
bitbake/lib/bb/fetch2/perforce.py

@@ -0,0 +1,187 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementations
+
+Classes for obtaining upstream sources for the
+BitBake build tools.
+
+"""
+
+# Copyright (C) 2003, 2004  Chris Larson
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+from future_builtins import zip
+import os
+import subprocess
+import logging
+import bb
+from   bb import data
+from   bb.fetch2 import FetchMethod
+from   bb.fetch2 import FetchError
+from   bb.fetch2 import logger
+from   bb.fetch2 import runfetchcmd
+
+class Perforce(FetchMethod):
+    def supports(self, ud, d):
+        return ud.type in ['p4']
+
+    def doparse(url, d):
+        parm = {}
+        path = url.split("://")[1]
+        delim = path.find("@");
+        if delim != -1:
+            (user, pswd, host, port) = path.split('@')[0].split(":")
+            path = path.split('@')[1]
+        else:
+            (host, port) = d.getVar('P4PORT', False).split(':')
+            user = ""
+            pswd = ""
+
+        if path.find(";") != -1:
+            keys=[]
+            values=[]
+            plist = path.split(';')
+            for item in plist:
+                if item.count('='):
+                    (key, value) = item.split('=')
+                    keys.append(key)
+                    values.append(value)
+
+            parm = dict(zip(keys, values))
+        path = "//" + path.split(';')[0]
+        host += ":%s" % (port)
+        parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm)
+
+        return host, path, user, pswd, parm
+    doparse = staticmethod(doparse)
+
+    def getcset(d, depot, host, user, pswd, parm):
+        p4opt = ""
+        if "cset" in parm:
+            return parm["cset"];
+        if user:
+            p4opt += " -u %s" % (user)
+        if pswd:
+            p4opt += " -P %s" % (pswd)
+        if host:
+            p4opt += " -p %s" % (host)
+
+        p4date = d.getVar("P4DATE", True)
+        if "revision" in parm:
+            depot += "#%s" % (parm["revision"])
+        elif "label" in parm:
+            depot += "@%s" % (parm["label"])
+        elif p4date:
+            depot += "@%s" % (p4date)
+
+        p4cmd = d.getVar('FETCHCMD_p4', True) or "p4"
+        logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
+        p4file, errors = bb.process.run("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
+        cset = p4file.strip()
+        logger.debug(1, "READ %s", cset)
+        if not cset:
+            return -1
+
+        return cset.split(' ')[1]
+    getcset = staticmethod(getcset)
+
+    def urldata_init(self, ud, d):
+        (host, path, user, pswd, parm) = Perforce.doparse(ud.url, d)
+
+        base_path = path.replace('/...', '')
+        base_path = self._strip_leading_slashes(base_path)
+        
+        if "label" in parm:
+            version = parm["label"]
+        else:
+            version = Perforce.getcset(d, path, host, user, pswd, parm)
+
+        ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base_path.replace('/', '.'), version), d)
+
+    def download(self, ud, d):
+        """
+        Fetch urls
+        """
+
+        (host, depot, user, pswd, parm) = Perforce.doparse(ud.url, d)
+
+        if depot.find('/...') != -1:
+            path = depot[:depot.find('/...')]
+        else:
+            path = depot[:depot.rfind('/')]
+
+        module = parm.get('module', os.path.basename(path))
+
+        # Get the p4 command
+        p4opt = ""
+        if user:
+            p4opt += " -u %s" % (user)
+
+        if pswd:
+            p4opt += " -P %s" % (pswd)
+
+        if host:
+            p4opt += " -p %s" % (host)
+
+        p4cmd = d.getVar('FETCHCMD_p4', True) or "p4"
+
+        # create temp directory
+        logger.debug(2, "Fetch: creating temporary directory")
+        bb.utils.mkdirhier(d.expand('${WORKDIR}'))
+        mktemp = d.getVar("FETCHCMD_p4mktemp", True) or d.expand("mktemp -d -q '${WORKDIR}/oep4.XXXXXX'")
+        tmpfile, errors = bb.process.run(mktemp)
+        tmpfile = tmpfile.strip()
+        if not tmpfile:
+            raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", ud.url)
+
+        if "label" in parm:
+            depot = "%s@%s" % (depot, parm["label"])
+        else:
+            cset = Perforce.getcset(d, depot, host, user, pswd, parm)
+            depot = "%s@%s" % (depot, cset)
+
+        os.chdir(tmpfile)
+        logger.info("Fetch " + ud.url)
+        logger.info("%s%s files %s", p4cmd, p4opt, depot)
+        p4file, errors = bb.process.run("%s%s files %s" % (p4cmd, p4opt, depot))
+        p4file = [f.rstrip() for f in p4file.splitlines()]
+
+        if not p4file:
+            raise FetchError("Fetch: unable to get the P4 files from %s" % depot, ud.url)
+
+        count = 0
+
+        for file in p4file:
+            list = file.split()
+
+            if list[2] == "delete":
+                continue
+
+            dest = list[0][len(path)+1:]
+            where = dest.find("#")
+
+            subprocess.call("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]), shell=True)
+            count = count + 1
+
+        if count == 0:
+            logger.error("Fetch: No files gathered from the P4 fetch")
+            raise FetchError("Fetch: No files gathered from the P4 fetch", ud.url)
+
+        runfetchcmd("tar -czf %s %s" % (ud.localpath, module), d, cleanup = [ud.localpath])
+        # cleanup
+        bb.utils.prunedir(tmpfile)
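
A worked example of the string handling in Perforce.doparse() above, using an invented p4 URL (the changeset lookup via getcset() is omitted because it needs a live server):

    url = "p4://user:secret:perforce.example.com:1666@depot/project/...;module=proj"
    path = url.split("://")[1]
    user, pswd, host, port = path.split("@")[0].split(":")
    path = path.split("@")[1]
    parm = dict(item.split("=") for item in path.split(";") if "=" in item)
    path = "//" + path.split(";")[0]
    host += ":%s" % port
    print("%s %s %s" % (host, path, parm))
    # -> perforce.example.com:1666 //depot/project/... {'module': 'proj'}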

+ 98 - 0
bitbake/lib/bb/fetch2/repo.py

@@ -0,0 +1,98 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake "Fetch" repo (git) implementation
+
+"""
+
+# Copyright (C) 2009 Tom Rini <trini@embeddedalley.com>
+#
+# Based on git.py which is:
+#Copyright (C) 2005 Richard Purdie
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import bb
+from   bb    import data
+from   bb.fetch2 import FetchMethod
+from   bb.fetch2 import runfetchcmd
+from   bb.fetch2 import logger
+
+class Repo(FetchMethod):
+    """Class to fetch a module or modules from repo (git) repositories"""
+    def supports(self, ud, d):
+        """
+        Check to see if a given url can be fetched with repo.
+        """
+        return ud.type in ["repo"]
+
+    def urldata_init(self, ud, d):
+        """
+        We don"t care about the git rev of the manifests repository, but
+        we do care about the manifest to use.  The default is "default".
+        We also care about the branch or tag to be used.  The default is
+        "master".
+        """
+
+        ud.proto = ud.parm.get('protocol', 'git')
+        ud.branch = ud.parm.get('branch', 'master')
+        ud.manifest = ud.parm.get('manifest', 'default.xml')
+        if not ud.manifest.endswith('.xml'):
+            ud.manifest += '.xml'
+
+        ud.localfile = data.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch), d)
+
+    def download(self, ud, d):
+        """Fetch url"""
+
+        if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
+            logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
+            return
+
+        gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
+        repodir = data.getVar("REPODIR", d, True) or os.path.join(data.getVar("DL_DIR", d, True), "repo")
+        codir = os.path.join(repodir, gitsrcname, ud.manifest)
+
+        if ud.user:
+            username = ud.user + "@"
+        else:
+            username = ""
+
+        bb.utils.mkdirhier(os.path.join(codir, "repo"))
+        os.chdir(os.path.join(codir, "repo"))
+        if not os.path.exists(os.path.join(codir, "repo", ".repo")):
+            bb.fetch2.check_network_access(d, "repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), ud.url)
+            runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d)
+
+        bb.fetch2.check_network_access(d, "repo sync %s" % ud.url, ud.url)
+        runfetchcmd("repo sync", d)
+        os.chdir(codir)
+
+        scmdata = ud.parm.get("scmdata", "")
+        if scmdata == "keep":
+            tar_flags = ""
+        else:
+            tar_flags = "--exclude '.repo' --exclude '.git'"
+
+        # Create a cache
+        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d)
+
+    def supports_srcrev(self):
+        return False
+
+    def _build_revision(self, ud, d):
+        return ud.manifest
+
+    def _want_sortable_revision(self, ud, d):
+        return False
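
With the urldata_init() defaults above (protocol=git, branch=master, manifest=default.xml), the repo init command built in download() looks roughly like this (host and path are invented):

    proto, branch, manifest = "git", "master", "default.xml"
    username, host, path = "", "android.example.com", "/platform/manifest"
    print("repo init -m %s -b %s -u %s://%s%s%s" % (manifest, branch, proto, username, host, path))
    # -> repo init -m default.xml -b master -u git://android.example.com/platform/manifest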

+ 129 - 0
bitbake/lib/bb/fetch2/sftp.py

@@ -0,0 +1,129 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake SFTP Fetch implementation
+
+Class for fetching files via SFTP. It tries to adhere to the (now
+expired) IETF Internet Draft for "Uniform Resource Identifier (URI)
+Scheme for Secure File Transfer Protocol (SFTP) and Secure Shell
+(SSH)" (SECSH URI).
+
+It uses SFTP (to adhere to the SECSH URI specification). It only
+supports key based authentication, not password. This class, unlike
+the SSH fetcher, does not support fetching a directory tree from the
+remote.
+
+  http://tools.ietf.org/html/draft-ietf-secsh-scp-sftp-ssh-uri-04
+  https://www.iana.org/assignments/uri-schemes/prov/sftp
+  https://tools.ietf.org/html/draft-ietf-secsh-filexfer-13
+
+Please note that '/' is used as the host path separator, and not ":"
+as you may be used to from the scp/sftp commands. You can use a
+~ (tilde) to specify a path relative to your home directory.
+(The /~user/ syntax, for specifying a path relative to another
+user's home directory, is not supported.) Note that the tilde must
+still follow the host path separator ("/"). See examples below.
+
+Example SRC_URIs:
+
+SRC_URI = "sftp://host.example.com/dir/path.file.txt"
+
+A path relative to your home directory.
+
+SRC_URI = "sftp://host.example.com/~/dir/path.file.txt"
+
+You can also specify a username (specifying a password in the
+URI is not supported, use SSH keys to authenticate):
+
+SRC_URI = "sftp://user@host.example.com/dir/path.file.txt"
+
+"""
+
+# Copyright (C) 2013, Olof Johansson <olof.johansson@axis.com>
+#
+# Based in part on bb.fetch2.wget:
+#    Copyright (C) 2003, 2004  Chris Larson
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import os
+import bb
+import urllib
+import commands
+from bb import data
+from bb.fetch2 import URI
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import runfetchcmd
+
+
+class SFTP(FetchMethod):
+    """Class to fetch urls via 'sftp'"""
+
+    def supports(self, ud, d):
+        """
+        Check to see if a given url can be fetched with sftp.
+        """
+        return ud.type in ['sftp']
+
+    def recommends_checksum(self, urldata):
+        return True
+
+    def urldata_init(self, ud, d):
+        if 'protocol' in ud.parm and ud.parm['protocol'] == 'git':
+            raise bb.fetch2.ParameterError(
+                "Invalid protocol - if you wish to fetch from a " +
+                "git repository using ssh, you need to use the " +
+                "git:// prefix with protocol=ssh", ud.url)
+
+        if 'downloadfilename' in ud.parm:
+            ud.basename = ud.parm['downloadfilename']
+        else:
+            ud.basename = os.path.basename(ud.path)
+
+        ud.localfile = data.expand(urllib.unquote(ud.basename), d)
+
+    def download(self, ud, d):
+        """Fetch urls"""
+
+        urlo = URI(ud.url)
+        basecmd = 'sftp -oBatchMode=yes'
+        port = ''
+        if urlo.port:
+            port = '-P %d' % urlo.port
+            urlo.port = None
+
+        dldir = data.getVar('DL_DIR', d, True)
+        lpath = os.path.join(dldir, ud.localfile)
+
+        user = ''
+        if urlo.userinfo:
+            user = urlo.userinfo + '@'
+
+        path = urlo.path
+
+        # Support URIs relative to the user's home directory, with
+        # the tilde syntax. (E.g. <sftp://example.com/~/foo.diff>).
+        if path[:3] == '/~/':
+            path = path[3:]
+
+        remote = '%s%s:%s' % (user, urlo.hostname, path)
+
+        cmd = '%s %s %s %s' % (basecmd, port, commands.mkarg(remote),
+                               commands.mkarg(lpath))
+
+        bb.fetch2.check_network_access(d, cmd, ud.url)
+        runfetchcmd(cmd, d)
+        return True
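
As an aside for readers of this patch, here is a minimal standalone sketch of the URI handling that SFTP.download() above performs (stripping the '/~/' prefix and building the user@host:path remote). It is not part of the commit; the helper name and example URL are illustrative only.

# Illustrative sketch (not part of the commit): how a SECSH-style sftp URI
# maps to the remote argument passed to the sftp command above.
try:
    from urllib.parse import urlparse   # Python 3
except ImportError:
    from urlparse import urlparse       # Python 2, as used by the module above

def sftp_remote(uri):
    u = urlparse(uri)
    path = u.path
    if path[:3] == '/~/':
        # '/~/' means "relative to the remote user's home directory"
        path = path[3:]
    user = ('%s@' % u.username) if u.username else ''
    return '%s%s:%s' % (user, u.hostname, path)

print(sftp_remote("sftp://user@host.example.com/~/dir/path.file.txt"))
# -> user@host.example.com:dir/path.file.txt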

+ 128 - 0
bitbake/lib/bb/fetch2/ssh.py

@@ -0,0 +1,128 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+'''
+BitBake 'Fetch' implementations
+
+This implementation is for Secure Shell (SSH), and attempts to comply with the
+IETF secsh internet draft:
+    http://tools.ietf.org/wg/secsh/draft-ietf-secsh-scp-sftp-ssh-uri/
+
+    Currently does not support the sftp parameters, as this uses scp.
+    Also does not support the 'fingerprint' connection parameter.
+
+    Please note that '/' is used as the host/path separator, not ':' as you
+    may be used to. Also, '~' can be used to specify the user's HOME
+    directory, but again only after the '/'.
+
+    Example SRC_URI:
+    SRC_URI = "ssh://user@host.example.com/dir/path/file.txt"
+    SRC_URI = "ssh://user@host.example.com/~/file.txt"
+'''
+
+# Copyright (C) 2006  OpenedHand Ltd.
+#
+#
+# Based in part on svk.py:
+#    Copyright (C) 2006 Holger Hans Peter Freyther
+#    Based on svn.py:
+#        Copyright (C) 2003, 2004  Chris Larson
+#        Based on functions from the base bb module:
+#            Copyright 2003 Holger Schurig
+#
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import re, os
+import bb
+from   bb import data
+from   bb.fetch2 import FetchMethod
+from   bb.fetch2 import FetchError
+from   bb.fetch2 import logger
+from   bb.fetch2 import runfetchcmd
+
+
+__pattern__ = re.compile(r'''
+ \s*                 # Skip leading whitespace
+ ssh://              # scheme
+ (                   # Optional username/password block
+  (?P<user>\S+)      # username
+  (:(?P<pass>\S+))?  # colon followed by the password (optional)
+ )?
+ (?P<cparam>(;[^;]+)*)?  # connection parameters block (optional)
+ @
+ (?P<host>\S+?)          # non-greedy match of the host
+ (:(?P<port>[0-9]+))?    # colon followed by the port (optional)
+ /
+ (?P<path>[^;]+)         # path on the remote system, may be absolute or relative,
+                         # and may include the use of '~' to reference the remote home
+                         # directory
+ (?P<sparam>(;[^;]+)*)?  # parameters block (optional)
+ $
+''', re.VERBOSE)
+
+class SSH(FetchMethod):
+    '''Class to fetch a module or modules via Secure Shell'''
+
+    def supports(self, urldata, d):
+        return __pattern__.match(urldata.url) is not None
+
+    def supports_checksum(self, urldata):
+        return False
+
+    def urldata_init(self, urldata, d):
+        if 'protocol' in urldata.parm and urldata.parm['protocol'] == 'git':
+            raise bb.fetch2.ParameterError(
+                "Invalid protocol - if you wish to fetch from a git " +
+                "repository using ssh, you need to use " +
+                "git:// prefix with protocol=ssh", urldata.url)
+        m = __pattern__.match(urldata.url)
+        path = m.group('path')
+        host = m.group('host')
+        urldata.localpath = os.path.join(d.getVar('DL_DIR', True),
+                os.path.basename(os.path.normpath(path)))
+
+    def download(self, urldata, d):
+        dldir = d.getVar('DL_DIR', True)
+
+        m = __pattern__.match(urldata.url)
+        path = m.group('path')
+        host = m.group('host')
+        port = m.group('port')
+        user = m.group('user')
+        password = m.group('pass')
+
+        if port:
+            portarg = '-P %s' % port
+        else:
+            portarg = ''
+
+        if user:
+            fr = user
+            if password:
+                fr += ':%s' % password
+            fr += '@%s' % host
+        else:
+            fr = host
+        fr += ':%s' % path
+
+
+        import commands
+        cmd = 'scp -B -r %s %s %s/' % (
+            portarg,
+            commands.mkarg(fr),
+            commands.mkarg(dldir)
+        )
+
+        bb.fetch2.check_network_access(d, cmd, urldata.url)
+
+        runfetchcmd(cmd, d)
+
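
A standalone sketch of the scp invocation that SSH.download() assembles above; it is not part of the commit. The host, port and paths are hypothetical, and the shell quoting done by commands.mkarg() is omitted for brevity.

# Illustrative sketch (not part of the commit): the scp command shape built
# by SSH.download() above. Values are hypothetical examples.
user, host, port, path = 'builder', 'host.example.com', '2222', '~/src/file.txt'
dldir = '/srv/downloads'

portarg = '-P %s' % port if port else ''
fr = ('%s@%s' % (user, host)) if user else host
fr += ':%s' % path

cmd = 'scp -B -r %s %s %s/' % (portarg, fr, dldir)
print(cmd)
# -> scp -B -r -P 2222 builder@host.example.com:~/src/file.txt /srv/downloads/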

+ 197 - 0
bitbake/lib/bb/fetch2/svn.py

@@ -0,0 +1,197 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementation for svn.
+
+"""
+
+# Copyright (C) 2003, 2004  Chris Larson
+# Copyright (C) 2004        Marcin Juszkiewicz
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import os
+import sys
+import logging
+import bb
+import re
+from   bb import data
+from   bb.fetch2 import FetchMethod
+from   bb.fetch2 import FetchError
+from   bb.fetch2 import MissingParameterError
+from   bb.fetch2 import runfetchcmd
+from   bb.fetch2 import logger
+
+class Svn(FetchMethod):
+    """Class to fetch a module or modules from svn repositories"""
+    def supports(self, ud, d):
+        """
+        Check to see if a given url can be fetched with svn.
+        """
+        return ud.type in ['svn']
+
+    def urldata_init(self, ud, d):
+        """
+        init svn specific variable within url data
+        """
+        if not "module" in ud.parm:
+            raise MissingParameterError('module', ud.url)
+
+        ud.basecmd = d.getVar('FETCHCMD_svn', True)
+
+        ud.module = ud.parm["module"]
+
+        if not "path_spec" in ud.parm:
+            ud.path_spec = ud.module
+        else:
+            ud.path_spec = ud.parm["path_spec"]
+
+        # Create paths to svn checkouts
+        relpath = self._strip_leading_slashes(ud.path)
+        ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath)
+        ud.moddir = os.path.join(ud.pkgdir, ud.module)
+
+        ud.setup_revisons(d)
+
+        if 'rev' in ud.parm:
+            ud.revision = ud.parm['rev']
+
+        ud.localfile = data.expand('%s_%s_%s_%s_.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)
+
+    def _buildsvncommand(self, ud, d, command):
+        """
+        Build up an svn commandline based on ud
+        command is "fetch", "update", "info"
+        """
+
+        proto = ud.parm.get('protocol', 'svn')
+
+        svn_rsh = None
+        if proto == "svn+ssh" and "rsh" in ud.parm:
+            svn_rsh = ud.parm["rsh"]
+
+        svnroot = ud.host + ud.path
+
+        options = []
+
+        options.append("--no-auth-cache")
+
+        if ud.user:
+            options.append("--username %s" % ud.user)
+
+        if ud.pswd:
+            options.append("--password %s" % ud.pswd)
+
+        if command == "info":
+            svncmd = "%s info %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module)
+        elif command == "log1":
+            svncmd = "%s log --limit 1 %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module)
+        else:
+            suffix = ""
+            if ud.revision:
+                options.append("-r %s" % ud.revision)
+                suffix = "@%s" % (ud.revision)
+
+            if command == "fetch":
+                transportuser = ud.parm.get("transportuser", "")
+                svncmd = "%s co %s %s://%s%s/%s%s %s" % (ud.basecmd, " ".join(options), proto, transportuser, svnroot, ud.module, suffix, ud.path_spec)
+            elif command == "update":
+                svncmd = "%s update %s" % (ud.basecmd, " ".join(options))
+            else:
+                raise FetchError("Invalid svn command %s" % command, ud.url)
+
+        if svn_rsh:
+            svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)
+
+        return svncmd
+
+    def download(self, ud, d):
+        """Fetch url"""
+
+        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
+
+        if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
+            svnupdatecmd = self._buildsvncommand(ud, d, "update")
+            logger.info("Update " + ud.url)
+            # update sources there
+            os.chdir(ud.moddir)
+            # We need to attempt to run svn upgrade first in case it's an older working copy format
+            try:
+                runfetchcmd(ud.basecmd + " upgrade", d)
+            except FetchError:
+                pass
+            logger.debug(1, "Running %s", svnupdatecmd)
+            bb.fetch2.check_network_access(d, svnupdatecmd, ud.url)
+            runfetchcmd(svnupdatecmd, d)
+        else:
+            svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
+            logger.info("Fetch " + ud.url)
+            # check out sources there
+            bb.utils.mkdirhier(ud.pkgdir)
+            os.chdir(ud.pkgdir)
+            logger.debug(1, "Running %s", svnfetchcmd)
+            bb.fetch2.check_network_access(d, svnfetchcmd, ud.url)
+            runfetchcmd(svnfetchcmd, d)
+
+        scmdata = ud.parm.get("scmdata", "")
+        if scmdata == "keep":
+            tar_flags = ""
+        else:
+            tar_flags = "--exclude '.svn'"
+
+        os.chdir(ud.pkgdir)
+        # tar them up to a defined filename
+        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.path_spec), d, cleanup = [ud.localpath])
+
+    def clean(self, ud, d):
+        """ Clean SVN specific files and dirs """
+
+        bb.utils.remove(ud.localpath)
+        bb.utils.remove(ud.moddir, True)
+        
+
+    def supports_srcrev(self):
+        return True
+
+    def _revision_key(self, ud, d, name):
+        """
+        Return a unique key for the url
+        """
+        return "svn:" + ud.moddir
+
+    def _latest_revision(self, ud, d, name):
+        """
+        Return the latest upstream revision number
+        """
+        bb.fetch2.check_network_access(d, self._buildsvncommand(ud, d, "log1"))
+
+        output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "log1"), d, True)
+
+        # skip the first line, as per output of svn log
+        # then we expect the revision on the 2nd line
+        revision = re.search('^r([0-9]*)', output.splitlines()[1]).group(1)
+
+        return revision
+
+    def sortable_revision(self, ud, d, name):
+        """
+        Return a sortable revision number which in our case is the revision number
+        """
+
+        return False, self._build_revision(ud, d)
+
+    def _build_revision(self, ud, d):
+        return ud.revision
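
A standalone sketch of the checkout command that _buildsvncommand() produces for the "fetch" case above; it is not part of the commit, and the base command and repository details are example values only (basecmd stands in for FETCHCMD_svn).

# Illustrative sketch (not part of the commit): the svn checkout command
# _buildsvncommand() builds when command == "fetch". All values are examples.
basecmd = "/usr/bin/env svn --non-interactive"      # example FETCHCMD_svn value
proto, host, path = "svn", "svn.example.com", "/repos/project"
module, revision, path_spec = "trunk", "1234", "trunk"

options = ["--no-auth-cache", "-r %s" % revision]
svnroot = host + path
suffix = "@%s" % revision
svncmd = "%s co %s %s://%s/%s%s %s" % (
    basecmd, " ".join(options), proto, svnroot, module, suffix, path_spec)
print(svncmd)
# -> /usr/bin/env svn --non-interactive co --no-auth-cache -r 1234
#    svn://svn.example.com/repos/project/trunk@1234 trunk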

+ 555 - 0
bitbake/lib/bb/fetch2/wget.py

@@ -0,0 +1,555 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementations
+
+Classes for obtaining upstream sources for the
+BitBake build tools.
+
+"""
+
+# Copyright (C) 2003, 2004  Chris Larson
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import re
+import tempfile
+import subprocess
+import os
+import logging
+import bb
+import urllib
+from   bb import data
+from   bb.fetch2 import FetchMethod
+from   bb.fetch2 import FetchError
+from   bb.fetch2 import logger
+from   bb.fetch2 import runfetchcmd
+from   bb.utils import export_proxies
+from   bs4 import BeautifulSoup
+from   bs4 import SoupStrainer
+
+class Wget(FetchMethod):
+    """Class to fetch urls via 'wget'"""
+    def supports(self, ud, d):
+        """
+        Check to see if a given url can be fetched with wget.
+        """
+        return ud.type in ['http', 'https', 'ftp']
+
+    def recommends_checksum(self, urldata):
+        return True
+
+    def urldata_init(self, ud, d):
+        if 'protocol' in ud.parm:
+            if ud.parm['protocol'] == 'git':
+                raise bb.fetch2.ParameterError("Invalid protocol - if you wish to fetch from a git repository using http, you need to instead use the git:// prefix with protocol=http", ud.url)
+
+        if 'downloadfilename' in ud.parm:
+            ud.basename = ud.parm['downloadfilename']
+        else:
+            ud.basename = os.path.basename(ud.path)
+
+        ud.localfile = data.expand(urllib.unquote(ud.basename), d)
+        if not ud.localfile:
+            ud.localfile = data.expand(urllib.unquote(ud.host + ud.path).replace("/", "."), d)
+
+        self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate"
+
+    def _runwget(self, ud, d, command, quiet):
+
+        logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
+        bb.fetch2.check_network_access(d, command)
+        runfetchcmd(command, d, quiet)
+
+    def download(self, ud, d):
+        """Fetch urls"""
+
+        fetchcmd = self.basecmd
+
+        if 'downloadfilename' in ud.parm:
+            dldir = d.getVar("DL_DIR", True)
+            bb.utils.mkdirhier(os.path.dirname(dldir + os.sep + ud.localfile))
+            fetchcmd += " -O " + dldir + os.sep + ud.localfile
+
+        uri = ud.url.split(";")[0]
+        if os.path.exists(ud.localpath):
+            # file exists, but we didn't complete it; trying again
+            fetchcmd += d.expand(" -c -P ${DL_DIR} '%s'" % uri)
+        else:
+            fetchcmd += d.expand(" -P ${DL_DIR} '%s'" % uri)
+
+        self._runwget(ud, d, fetchcmd, False)
+
+        # Sanity check since wget can pretend it succeeded when it didn't
+        # Also, this used to happen if sourceforge sent us to the mirror page
+        if not os.path.exists(ud.localpath):
+            raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)
+
+        if os.path.getsize(ud.localpath) == 0:
+            os.remove(ud.localpath)
+            raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (uri), uri)
+
+        return True
+
+    def checkstatus(self, fetch, ud, d):
+        import urllib2, socket, httplib
+        from urllib import addinfourl
+        from bb.fetch2 import FetchConnectionCache
+
+        class HTTPConnectionCache(httplib.HTTPConnection):
+            if fetch.connection_cache:
+                def connect(self):
+                    """Connect to the host and port specified in __init__."""
+
+                    sock = fetch.connection_cache.get_connection(self.host, self.port)
+                    if sock:
+                        self.sock = sock
+                    else:
+                        self.sock = socket.create_connection((self.host, self.port),
+                                    self.timeout, self.source_address)
+                        fetch.connection_cache.add_connection(self.host, self.port, self.sock)
+
+                    if self._tunnel_host:
+                        self._tunnel()
+
+        class CacheHTTPHandler(urllib2.HTTPHandler):
+            def http_open(self, req):
+                return self.do_open(HTTPConnectionCache, req)
+
+            def do_open(self, http_class, req):
+                """Return an addinfourl object for the request, using http_class.
+
+                http_class must implement the HTTPConnection API from httplib.
+                The addinfourl return value is a file-like object.  It also
+                has methods and attributes including:
+                    - info(): return a mimetools.Message object for the headers
+                    - geturl(): return the original request URL
+                    - code: HTTP status code
+                """
+                host = req.get_host()
+                if not host:
+                    raise urllib2.URLError('no host given')
+
+                h = http_class(host, timeout=req.timeout) # will parse host:port
+                h.set_debuglevel(self._debuglevel)
+
+                headers = dict(req.unredirected_hdrs)
+                headers.update(dict((k, v) for k, v in req.headers.items()
+                            if k not in headers))
+
+                # We want to make an HTTP/1.1 request, but the addinfourl
+                # class isn't prepared to deal with a persistent connection.
+                # It will try to read all remaining data from the socket,
+                # which will block while the server waits for the next request.
+                # So make sure the connection gets closed after the (only)
+                # request.
+
+                # Don't close connection when connection_cache is enabled,
+                if fetch.connection_cache is None: 
+                    headers["Connection"] = "close"
+                else:
+                    headers["Connection"] = "Keep-Alive" # Works for HTTP/1.0
+
+                headers = dict(
+                    (name.title(), val) for name, val in headers.items())
+
+                if req._tunnel_host:
+                    tunnel_headers = {}
+                    proxy_auth_hdr = "Proxy-Authorization"
+                    if proxy_auth_hdr in headers:
+                        tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
+                        # Proxy-Authorization should not be sent to origin
+                        # server.
+                        del headers[proxy_auth_hdr]
+                    h.set_tunnel(req._tunnel_host, headers=tunnel_headers)
+
+                try:
+                    h.request(req.get_method(), req.get_selector(), req.data, headers)
+                except socket.error, err: # XXX what error?
+                    # Don't close connection when cache is enabled.
+                    if fetch.connection_cache is None:
+                        h.close()
+                    raise urllib2.URLError(err)
+                else:
+                    try:
+                        r = h.getresponse(buffering=True)
+                    except TypeError: # buffering kw not supported
+                        r = h.getresponse()
+
+                # Pick apart the HTTPResponse object to get the addinfourl
+                # object initialized properly.
+
+                # Wrap the HTTPResponse object in socket's file object adapter
+                # for Windows.  That adapter calls recv(), so delegate recv()
+                # to read().  This weird wrapping allows the returned object to
+                # have readline() and readlines() methods.
+
+                # XXX It might be better to extract the read buffering code
+                # out of socket._fileobject() and into a base class.
+                r.recv = r.read
+
+                # no data, just have to read
+                r.read()
+                class fp_dummy(object):
+                    def read(self):
+                        return ""
+                    def readline(self):
+                        return ""
+                    def close(self):
+                        pass
+
+                resp = addinfourl(fp_dummy(), r.msg, req.get_full_url())
+                resp.code = r.status
+                resp.msg = r.reason
+
+                # Close the connection when the server requests it.
+                if fetch.connection_cache is not None:
+                    if 'Connection' in r.msg and r.msg['Connection'] == 'close':
+                        fetch.connection_cache.remove_connection(h.host, h.port)
+
+                return resp
+
+        class HTTPMethodFallback(urllib2.BaseHandler):
+            """
+            Fallback to GET if HEAD is not allowed (405 HTTP error)
+            """
+            def http_error_405(self, req, fp, code, msg, headers):
+                fp.read()
+                fp.close()
+
+                newheaders = dict((k,v) for k,v in req.headers.items()
+                                  if k.lower() not in ("content-length", "content-type"))
+                return self.parent.open(urllib2.Request(req.get_full_url(),
+                                                        headers=newheaders,
+                                                        origin_req_host=req.get_origin_req_host(),
+                                                        unverifiable=True))
+
+            """
+            Some servers (e.g. GitHub archives, hosted on Amazon S3) return 403
+            Forbidden when they actually mean 405 Method Not Allowed.
+            """
+            http_error_403 = http_error_405
+
+            """
+            Some servers (e.g. FusionForge) return 406 Not Acceptable when they
+            actually mean 405 Method Not Allowed.
+            """
+            http_error_406 = http_error_405
+
+        class FixedHTTPRedirectHandler(urllib2.HTTPRedirectHandler):
+            """
+            urllib2.HTTPRedirectHandler resets the method to GET on redirect,
+            when we want to follow redirects using the original method.
+            """
+            def redirect_request(self, req, fp, code, msg, headers, newurl):
+                newreq = urllib2.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl)
+                newreq.get_method = lambda: req.get_method()
+                return newreq
+        exported_proxies = export_proxies(d)
+
+        handlers = [FixedHTTPRedirectHandler, HTTPMethodFallback]
+        if exported_proxies:
+            handlers.append(urllib2.ProxyHandler())
+        handlers.append(CacheHTTPHandler())
+        # XXX: Since Python 2.7.9 ssl cert validation is enabled by default
+        # see PEP-0476, this causes verification errors on some https servers
+        # so disable by default.
+        import ssl
+        if hasattr(ssl, '_create_unverified_context'):
+            handlers.append(urllib2.HTTPSHandler(context=ssl._create_unverified_context()))
+        opener = urllib2.build_opener(*handlers)
+
+        try:
+            uri = ud.url.split(";")[0]
+            r = urllib2.Request(uri)
+            r.get_method = lambda: "HEAD"
+            opener.open(r)
+        except urllib2.URLError as e:
+            # debug for now to avoid spamming the logs in e.g. remote sstate searches
+            logger.debug(2, "checkstatus() urlopen failed: %s" % e)
+            return False
+        return True
+
+    def _parse_path(self, regex, s):
+        """
+        Find and group name, version and archive type in the given string s
+        """
+
+        m = regex.search(s)
+        if m:
+            pname = ''
+            pver = ''
+            ptype = ''
+
+            mdict = m.groupdict()
+            if 'name' in mdict.keys():
+                pname = mdict['name']
+            if 'pver' in mdict.keys():
+                pver = mdict['pver']
+            if 'type' in mdict.keys():
+                ptype = mdict['type']
+
+            bb.debug(3, "_parse_path: %s, %s, %s" % (pname, pver, ptype))
+
+            return (pname, pver, ptype)
+
+        return None
+
+    def _modelate_version(self, version):
+        if version[0] in ['.', '-']:
+            if version[1].isdigit():
+                version = version[1] + version[0] + version[2:len(version)]
+            else:
+                version = version[1:len(version)]
+
+        version = re.sub('-', '.', version)
+        version = re.sub('_', '.', version)
+        version = re.sub('(rc)+', '.1000.', version)
+        version = re.sub('(beta)+', '.100.', version)
+        version = re.sub('(alpha)+', '.10.', version)
+        if version[0] == 'v':
+            version = version[1:len(version)]
+        return version
+
+    def _vercmp(self, old, new):
+        """
+        Check whether 'new' is newer than 'old' version. We use existing vercmp() for the
+        purpose. PE is cleared in comparison as it's not for build, and PR is cleared too
+        for simplicity as it's somehow difficult to get from various upstream format
+        """
+
+        (oldpn, oldpv, oldsuffix) = old
+        (newpn, newpv, newsuffix) = new
+
+        """
+        Check for a new suffix type that we have never heard of before
+        """
+        if (newsuffix):
+            m = self.suffix_regex_comp.search(newsuffix)
+            if not m:
+                bb.warn("%s has a possible unknown suffix: %s" % (newpn, newsuffix))
+                return False
+
+        """
+        Not our package so ignore it
+        """
+        if oldpn != newpn:
+            return False
+
+        oldpv = self._modelate_version(oldpv)
+        newpv = self._modelate_version(newpv)
+
+        return bb.utils.vercmp(("0", oldpv, ""), ("0", newpv, ""))
+
+    def _fetch_index(self, uri, ud, d):
+        """
+        Run fetch checkstatus to get directory information
+        """
+        f = tempfile.NamedTemporaryFile()
+
+        agent = "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12"
+        fetchcmd = self.basecmd
+        fetchcmd += " -O " + f.name + " --user-agent='" + agent + "' '" + uri + "'"
+        try:
+            self._runwget(ud, d, fetchcmd, True)
+            fetchresult = f.read()
+        except bb.fetch2.BBFetchException:
+            fetchresult = ""
+
+        f.close()
+        return fetchresult
+
+    def _check_latest_version(self, url, package, package_regex, current_version, ud, d):
+        """
+        Return the latest version of a package inside a given directory path
+        If error or no version, return ""
+        """
+        valid = 0
+        version = ['', '', '']
+
+        bb.debug(3, "VersionURL: %s" % (url))
+        soup = BeautifulSoup(self._fetch_index(url, ud, d), "html.parser", parse_only=SoupStrainer("a"))
+        if not soup:
+            bb.debug(3, "*** %s NO SOUP" % (url))
+            return ""
+
+        for line in soup.find_all('a', href=True):
+            bb.debug(3, "line['href'] = '%s'" % (line['href']))
+            bb.debug(3, "line = '%s'" % (str(line)))
+
+            newver = self._parse_path(package_regex, line['href'])
+            if not newver:
+                newver = self._parse_path(package_regex, str(line))
+
+            if newver:
+                bb.debug(3, "Upstream version found: %s" % newver[1])
+                if valid == 0:
+                    version = newver
+                    valid = 1
+                elif self._vercmp(version, newver) < 0:
+                    version = newver
+                
+        pupver = re.sub('_', '.', version[1])
+
+        bb.debug(3, "*** %s -> UpstreamVersion = %s (CurrentVersion = %s)" %
+                (package, pupver or "N/A", current_version[1]))
+
+        if valid:
+            return pupver
+
+        return ""
+
+    def _check_latest_version_by_dir(self, dirver, package, package_regex,
+            current_version, ud, d):
+        """
+            Scan every directory in order to get upstream version.
+        """
+        version_dir = ['', '', '']
+        version = ['', '', '']
+
+        dirver_regex = re.compile("(\D*)((\d+[\.\-_])+(\d+))")
+        s = dirver_regex.search(dirver)
+        if s:
+            version_dir[1] = s.group(2)
+        else:
+            version_dir[1] = dirver
+
+        dirs_uri = bb.fetch.encodeurl([ud.type, ud.host,
+                ud.path.split(dirver)[0], ud.user, ud.pswd, {}])
+        bb.debug(3, "DirURL: %s, %s" % (dirs_uri, package))
+
+        soup = BeautifulSoup(self._fetch_index(dirs_uri, ud, d), "html.parser", parse_only=SoupStrainer("a"))
+        if not soup:
+            return version[1]
+
+        for line in soup.find_all('a', href=True):
+            s = dirver_regex.search(line['href'].strip("/"))
+            if s:
+                version_dir_new = ['', s.group(2), '']
+                if self._vercmp(version_dir, version_dir_new) <= 0:
+                    dirver_new = s.group(1) + s.group(2)
+                    path = ud.path.replace(dirver, dirver_new, True) \
+                        .split(package)[0]
+                    uri = bb.fetch.encodeurl([ud.type, ud.host, path,
+                        ud.user, ud.pswd, {}])
+
+                    pupver = self._check_latest_version(uri,
+                            package, package_regex, current_version, ud, d)
+                    if pupver:
+                        version[1] = pupver
+
+                    version_dir = version_dir_new
+
+        return version[1]
+
+    def _init_regexes(self, package, ud, d):
+        """
+        Match as many patterns as possible such as:
+                gnome-common-2.20.0.tar.gz (most common format)
+                gtk+-2.90.1.tar.gz
+                xf86-input-synaptics-12.6.9.tar.gz
+                dri2proto-2.3.tar.gz
+                blktool_4.orig.tar.gz
+                libid3tag-0.15.1b.tar.gz
+                unzip552.tar.gz
+                icu4c-3_6-src.tgz
+                genext2fs_1.3.orig.tar.gz
+                gst-fluendo-mp3
+        """
+        # match most patterns which use "-" as the separator before the version digits
+        pn_prefix1 = "[a-zA-Z][a-zA-Z0-9]*([-_][a-zA-Z]\w+)*\+?[-_]"
+        # a loose pattern such as for unzip552.tar.gz
+        pn_prefix2 = "[a-zA-Z]+"
+        # a loose pattern such as for 80325-quicky-0.4.tar.gz
+        pn_prefix3 = "[0-9]+[-]?[a-zA-Z]+"
+        # Save the Package Name (pn) Regex for use later
+        pn_regex = "(%s|%s|%s)" % (pn_prefix1, pn_prefix2, pn_prefix3)
+
+        # match version
+        pver_regex = "(([A-Z]*\d+[a-zA-Z]*[\.\-_]*)+)"
+
+        # match arch
+        parch_regex = "-source|_all_"
+
+        # The src.rpm extension was added only for rpm packages. It can be removed if
+        # rpm packages will always be considered as having to be manually upgraded.
+        psuffix_regex = "(tar\.gz|tgz|tar\.bz2|zip|xz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)"
+
+        # match name, version and archive type of a package
+        package_regex_comp = re.compile("(?P<name>%s?\.?v?)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s$)"
+                                                    % (pn_regex, pver_regex, parch_regex, psuffix_regex))
+        self.suffix_regex_comp = re.compile(psuffix_regex)
+
+        # compile regex, can be specific by package or generic regex
+        pn_regex = d.getVar('UPSTREAM_CHECK_REGEX', True)
+        if pn_regex:
+            package_custom_regex_comp = re.compile(pn_regex)
+        else:
+            version = self._parse_path(package_regex_comp, package)
+            if version:
+                package_custom_regex_comp = re.compile(
+                    "(?P<name>%s)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s)" %
+                    (re.escape(version[0]), pver_regex, parch_regex, psuffix_regex))
+            else:
+                package_custom_regex_comp = None
+
+        return package_custom_regex_comp
+
+    def latest_versionstring(self, ud, d):
+        """
+        Manipulate the URL and try to obtain the latest package version
+
+        A sanity check ensures the name and type match.
+        """
+        package = ud.path.split("/")[-1]
+        current_version = ['', d.getVar('PV', True), '']
+
+        """possible to have no version in pkg name, such as spectrum-fw"""
+        if not re.search("\d+", package):
+            current_version[1] = re.sub('_', '.', current_version[1])
+            current_version[1] = re.sub('-', '.', current_version[1])
+            return (current_version[1], '')
+
+        package_regex = self._init_regexes(package, ud, d)
+        if package_regex is None:
+            bb.warn("latest_versionstring: package %s don't match pattern" % (package))
+            return ('', '')
+        bb.debug(3, "latest_versionstring, regex: %s" % (package_regex.pattern))
+
+        uri = ""
+        regex_uri = d.getVar("UPSTREAM_CHECK_URI", True)
+        if not regex_uri:
+            path = ud.path.split(package)[0]
+
+            # search for version matches on folders inside the path, like:
+            # "5.7" in http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
+            dirver_regex = re.compile("(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
+            m = dirver_regex.search(path)
+            if m:
+                pn = d.getVar('PN', True)
+                dirver = m.group('dirver')
+
+                dirver_pn_regex = re.compile("%s\d?" % (re.escape(pn)))
+                if not dirver_pn_regex.search(dirver):
+                    return (self._check_latest_version_by_dir(dirver,
+                        package, package_regex, current_version, ud, d), '')
+
+            uri = bb.fetch.encodeurl([ud.type, ud.host, path, ud.user, ud.pswd, {}])
+        else:
+            uri = regex_uri
+
+        return (self._check_latest_version(uri, package, package_regex,
+                current_version, ud, d), '')
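
A standalone illustration of the kind of name/version/type split that _init_regexes() and _parse_path() perform above; it is not part of the commit, and the regex below is a simplified stand-in for the real package_regex_comp.

# Illustrative sketch (not part of the commit): splitting a tarball filename
# into name, version and archive type, as the wget fetcher's upstream version
# check does. The pattern here is deliberately simplified.
import re

package_regex = re.compile(
    r"(?P<name>[a-zA-Z][\w+-]*?)-(?P<pver>\d[\w.]*)\.(?P<type>tar\.gz|tgz|tar\.bz2|tar\.xz|zip)$")

for fname in ("gnome-common-2.20.0.tar.gz", "xf86-input-synaptics-12.6.9.tar.gz"):
    m = package_regex.match(fname)
    if m:
        print("%s %s %s" % (m.group('name'), m.group('pver'), m.group('type')))
# -> gnome-common 2.20.0 tar.gz
# -> xf86-input-synaptics 12.6.9 tar.gz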

+ 439 - 0
bitbake/lib/bb/main.py

@@ -0,0 +1,439 @@
+#!/usr/bin/env python
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Copyright (C) 2003, 2004  Chris Larson
+# Copyright (C) 2003, 2004  Phil Blundell
+# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
+# Copyright (C) 2005        Holger Hans Peter Freyther
+# Copyright (C) 2005        ROAD GmbH
+# Copyright (C) 2006        Richard Purdie
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import sys
+import logging
+import optparse
+import warnings
+
+import bb
+from bb import event
+import bb.msg
+from bb import cooker
+from bb import ui
+from bb import server
+from bb import cookerdata
+
+logger = logging.getLogger("BitBake")
+
+class BBMainException(Exception):
+    pass
+
+def present_options(optionlist):
+    if len(optionlist) > 1:
+        return ' or '.join([', '.join(optionlist[:-1]), optionlist[-1]])
+    else:
+        return optionlist[0]
+
+class BitbakeHelpFormatter(optparse.IndentedHelpFormatter):
+    def format_option(self, option):
+        # We need to do this here rather than in the text we supply to
+        # add_option() because we don't want to call list_extension_modules()
+        # on every execution (since it imports all of the modules)
+        # Note also that we modify option.help rather than the returned text
+        # - this is so that we don't have to re-format the text ourselves
+        if option.dest == 'ui':
+            valid_uis = list_extension_modules(bb.ui, 'main')
+            option.help = option.help.replace('@CHOICES@', present_options(valid_uis))
+        elif option.dest == 'servertype':
+            valid_server_types = list_extension_modules(bb.server, 'BitBakeServer')
+            option.help = option.help.replace('@CHOICES@', present_options(valid_server_types))
+
+        return optparse.IndentedHelpFormatter.format_option(self, option)
+
+def list_extension_modules(pkg, checkattr):
+    """
+    Lists extension modules in a specific Python package
+    (e.g. UIs, servers). NOTE: Calling this function will import all of the
+    submodules of the specified module in order to check for the specified
+    attribute; this can have unusual side-effects. As a result, this should
+    only be called when displaying help text or error messages.
+    Parameters:
+        pkg: previously imported Python package to list
+        checkattr: attribute to look for in module to determine if it's valid
+            as the type of extension you are looking for
+    """
+    import pkgutil
+    pkgdir = os.path.dirname(pkg.__file__)
+
+    modules = []
+    for _, modulename, _ in pkgutil.iter_modules([pkgdir]):
+        if os.path.isdir(os.path.join(pkgdir, modulename)):
+            # ignore directories
+            continue
+        try:
+            module = __import__(pkg.__name__, fromlist=[modulename])
+        except:
+            # If we can't import it, it's not valid
+            continue
+        module_if = getattr(module, modulename)
+        if getattr(module_if, 'hidden_extension', False):
+            continue
+        if not checkattr or hasattr(module_if, checkattr):
+            modules.append(modulename)
+    return modules
+
+def import_extension_module(pkg, modulename, checkattr):
+    try:
+        # Dynamically load the UI based on the ui name. Although we
+        # suggest a fixed set this allows you to have flexibility in which
+        # ones are available.
+        module = __import__(pkg.__name__, fromlist = [modulename])
+        return getattr(module, modulename)
+    except AttributeError:
+        raise BBMainException('FATAL: Unable to import extension module "%s" from %s. Valid extension modules: %s' % (modulename, pkg.__name__, present_options(list_extension_modules(pkg, checkattr))))
+
+
+# Display bitbake/OE warnings via the BitBake.Warnings logger, ignoring others
+warnlog = logging.getLogger("BitBake.Warnings")
+_warnings_showwarning = warnings.showwarning
+def _showwarning(message, category, filename, lineno, file=None, line=None):
+    if file is not None:
+        if _warnings_showwarning is not None:
+            _warnings_showwarning(message, category, filename, lineno, file, line)
+    else:
+        s = warnings.formatwarning(message, category, filename, lineno)
+        warnlog.warn(s)
+
+warnings.showwarning = _showwarning
+warnings.filterwarnings("ignore")
+warnings.filterwarnings("default", module="(<string>$|(oe|bb)\.)")
+warnings.filterwarnings("ignore", category=PendingDeprecationWarning)
+warnings.filterwarnings("ignore", category=ImportWarning)
+warnings.filterwarnings("ignore", category=DeprecationWarning, module="<string>$")
+warnings.filterwarnings("ignore", message="With-statements now directly support multiple context managers")
+
+class BitBakeConfigParameters(cookerdata.ConfigParameters):
+
+    def parseCommandLine(self, argv=sys.argv):
+        parser = optparse.OptionParser(
+            formatter = BitbakeHelpFormatter(),
+            version = "BitBake Build Tool Core version %s" % bb.__version__,
+            usage = """%prog [options] [recipename/target recipe:do_task ...]
+
+    Executes the specified task (default is 'build') for a given set of target recipes (.bb files).
+    It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which
+    will provide the layer, BBFILES and other configuration information.""")
+
+        parser.add_option("-b", "--buildfile", help = "Execute tasks from a specific .bb recipe directly. WARNING: Does not handle any dependencies from other recipes.",
+                   action = "store", dest = "buildfile", default = None)
+
+        parser.add_option("-k", "--continue", help = "Continue as much as possible after an error. While the target that failed and anything depending on it cannot be built, as much as possible will be built before stopping.",
+                   action = "store_false", dest = "abort", default = True)
+
+        parser.add_option("-a", "--tryaltconfigs", help = "Continue with builds by trying to use alternative providers where possible.",
+                   action = "store_true", dest = "tryaltconfigs", default = False)
+
+        parser.add_option("-f", "--force", help = "Force the specified targets/task to run (invalidating any existing stamp file).",
+                   action = "store_true", dest = "force", default = False)
+
+        parser.add_option("-c", "--cmd", help = "Specify the task to execute. The exact options available depend on the metadata. Some examples might be 'compile' or 'populate_sysroot' or 'listtasks' may give a list of the tasks available.",
+                   action = "store", dest = "cmd")
+
+        parser.add_option("-C", "--clear-stamp", help = "Invalidate the stamp for the specified task such as 'compile' and then run the default task for the specified target(s).",
+                    action = "store", dest = "invalidate_stamp")
+
+        parser.add_option("-r", "--read", help = "Read the specified file before bitbake.conf.",
+                   action = "append", dest = "prefile", default = [])
+
+        parser.add_option("-R", "--postread", help = "Read the specified file after bitbake.conf.",
+                          action = "append", dest = "postfile", default = [])
+
+        parser.add_option("-v", "--verbose", help = "Output more log message data to the terminal.",
+                   action = "store_true", dest = "verbose", default = False)
+
+        parser.add_option("-D", "--debug", help = "Increase the debug level. You can specify this more than once.",
+                   action = "count", dest="debug", default = 0)
+
+        parser.add_option("-n", "--dry-run", help = "Don't execute, just go through the motions.",
+                   action = "store_true", dest = "dry_run", default = False)
+
+        parser.add_option("-S", "--dump-signatures", help = "Dump out the signature construction information, with no task execution. The SIGNATURE_HANDLER parameter is passed to the handler. Two common values are none and printdiff but the handler may define more/less. none means only dump the signature, printdiff means compare the dumped signature with the cached one.",
+                   action = "append", dest = "dump_signatures", default = [], metavar="SIGNATURE_HANDLER")
+
+        parser.add_option("-p", "--parse-only", help = "Quit after parsing the BB recipes.",
+                   action = "store_true", dest = "parse_only", default = False)
+
+        parser.add_option("-s", "--show-versions", help = "Show current and preferred versions of all recipes.",
+                   action = "store_true", dest = "show_versions", default = False)
+
+        parser.add_option("-e", "--environment", help = "Show the global or per-recipe environment complete with information about where variables were set/changed.",
+                   action = "store_true", dest = "show_environment", default = False)
+
+        parser.add_option("-g", "--graphviz", help = "Save dependency tree information for the specified targets in the dot syntax.",
+                    action = "store_true", dest = "dot_graph", default = False)
+
+        parser.add_option("-I", "--ignore-deps", help = """Assume these dependencies don't exist and are already provided (equivalent to ASSUME_PROVIDED). Useful to make dependency graphs more appealing""",
+                    action = "append", dest = "extra_assume_provided", default = [])
+
+        parser.add_option("-l", "--log-domains", help = """Show debug logging for the specified logging domains""",
+                    action = "append", dest = "debug_domains", default = [])
+
+        parser.add_option("-P", "--profile", help = "Profile the command and save reports.",
+                   action = "store_true", dest = "profile", default = False)
+
+        env_ui = os.environ.get('BITBAKE_UI', None)
+        default_ui = env_ui or 'knotty'
+        # @CHOICES@ is substituted out by BitbakeHelpFormatter above
+        parser.add_option("-u", "--ui", help = "The user interface to use (@CHOICES@ - default %default).",
+                   action="store", dest="ui", default=default_ui)
+
+        # @CHOICES@ is substituted out by BitbakeHelpFormatter above
+        parser.add_option("-t", "--servertype", help = "Choose which server type to use (@CHOICES@ - default %default).",
+                   action = "store", dest = "servertype", default = "process")
+
+        parser.add_option("", "--token", help = "Specify the connection token to be used when connecting to a remote server.",
+                   action = "store", dest = "xmlrpctoken")
+
+        parser.add_option("", "--revisions-changed", help = "Set the exit code depending on whether upstream floating revisions have changed or not.",
+                   action = "store_true", dest = "revisions_changed", default = False)
+
+        parser.add_option("", "--server-only", help = "Run bitbake without a UI, only starting a server (cooker) process.",
+                   action = "store_true", dest = "server_only", default = False)
+
+        parser.add_option("-B", "--bind", help = "The name/address for the bitbake server to bind to.",
+                   action = "store", dest = "bind", default = False)
+
+        parser.add_option("", "--no-setscene", help = "Do not run any setscene tasks. sstate will be ignored and everything needed, built.",
+                   action = "store_true", dest = "nosetscene", default = False)
+
+        parser.add_option("", "--setscene-only", help = "Only run setscene tasks, don't run any real tasks.",
+                   action = "store_true", dest = "setsceneonly", default = False)
+
+        parser.add_option("", "--remote-server", help = "Connect to the specified server.",
+                   action = "store", dest = "remote_server", default = False)
+
+        parser.add_option("-m", "--kill-server", help = "Terminate the remote server.",
+                    action = "store_true", dest = "kill_server", default = False)
+
+        parser.add_option("", "--observe-only", help = "Connect to a server as an observing-only client.",
+                   action = "store_true", dest = "observe_only", default = False)
+
+        parser.add_option("", "--status-only", help = "Check the status of the remote bitbake server.",
+                   action = "store_true", dest = "status_only", default = False)
+
+        parser.add_option("-w", "--write-log", help = "Writes the event log of the build to a bitbake event json file. Use '' (empty string) to assign the name automatically.",
+                   action = "store", dest = "writeeventlog")
+
+        options, targets = parser.parse_args(argv)
+
+        # some environment variables also set configuration options
+        if "BBSERVER" in os.environ:
+            options.servertype = "xmlrpc"
+            options.remote_server = os.environ["BBSERVER"]
+
+        if "BBTOKEN" in os.environ:
+            options.xmlrpctoken = os.environ["BBTOKEN"]
+
+        if "BBEVENTLOG" is os.environ:
+            options.writeeventlog = os.environ["BBEVENTLOG"]
+
+        # fill in proper log name if not supplied
+        if options.writeeventlog is not None and len(options.writeeventlog) == 0:
+            import datetime
+            options.writeeventlog = "bitbake_eventlog_%s.json" % datetime.datetime.now().strftime("%Y%m%d%H%M%S")
+
+        # if BBSERVER says to autodetect, let's do that
+        if options.remote_server:
+            [host, port] = options.remote_server.split(":", 2)
+            port = int(port)
+            # use automatic port if port set to -1, means read it from
+            # the bitbake.lock file; this is a bit tricky, but we always expect
+            # to be in the base of the build directory if we need to have a
+            # chance to start the server later, anyway
+            if port == -1:
+                lock_location = "./bitbake.lock"
+                # we try to read the address at all times; if the server is not started,
+                # we'll try to start it after the first connect fails, below
+                try:
+                    lf = open(lock_location, 'r')
+                    remotedef = lf.readline()
+                    [host, port] = remotedef.split(":")
+                    port = int(port)
+                    lf.close()
+                    options.remote_server = remotedef
+                except Exception as e:
+                    raise BBMainException("Failed to read bitbake.lock (%s), invalid port" % str(e))
+
+        return options, targets[1:]
+
+
+def start_server(servermodule, configParams, configuration, features):
+    server = servermodule.BitBakeServer()
+    if configParams.bind:
+        (host, port) = configParams.bind.split(':')
+        server.initServer((host, int(port)))
+        configuration.interface = [ server.serverImpl.host, server.serverImpl.port ]
+    else:
+        server.initServer()
+        configuration.interface = []
+
+    try:
+        configuration.setServerRegIdleCallback(server.getServerIdleCB())
+
+        cooker = bb.cooker.BBCooker(configuration, features)
+
+        server.addcooker(cooker)
+        server.saveConnectionDetails()
+    except Exception as e:
+        exc_info = sys.exc_info()
+        while hasattr(server, "event_queue"):
+            try:
+                import queue
+            except ImportError:
+                import Queue as queue
+            try:
+                event = server.event_queue.get(block=False)
+            except (queue.Empty, IOError):
+                break
+            if isinstance(event, logging.LogRecord):
+                logger.handle(event)
+        raise exc_info[1], None, exc_info[2]
+    server.detach()
+    cooker.lock.close()
+    return server
+
+
+def bitbake_main(configParams, configuration):
+
+    # Python multiprocessing requires /dev/shm on Linux
+    if sys.platform.startswith('linux') and not os.access('/dev/shm', os.W_OK | os.X_OK):
+        raise BBMainException("FATAL: /dev/shm does not exist or is not writable")
+
+    # Unbuffer stdout to avoid log truncation in the event
+    # of a disorderly exit, as well as to provide timely
+    # updates to log files for use with tail
+    try:
+        if sys.stdout.name == '<stdout>':
+            sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
+    except:
+        pass
+
+
+    configuration.setConfigParameters(configParams)
+
+    ui_module = import_extension_module(bb.ui, configParams.ui, 'main')
+    servermodule = import_extension_module(bb.server, configParams.servertype, 'BitBakeServer')
+
+    if configParams.server_only:
+        if configParams.servertype != "xmlrpc":
+            raise BBMainException("FATAL: If '--server-only' is defined, we must set the "
+                                  "servertype as 'xmlrpc'.\n")
+        if not configParams.bind:
+            raise BBMainException("FATAL: The '--server-only' option requires a name/address "
+                                  "to bind to with the -B option.\n")
+        if configParams.remote_server:
+            raise BBMainException("FATAL: The '--server-only' option conflicts with %s.\n" %
+                                  ("the BBSERVER environment variable" if "BBSERVER" in os.environ \
+                                   else "the '--remote-server' option" ))
+
+    if configParams.bind and configParams.servertype != "xmlrpc":
+        raise BBMainException("FATAL: If '-B' or '--bind' is defined, we must "
+                              "set the servertype as 'xmlrpc'.\n")
+
+    if configParams.remote_server and configParams.servertype != "xmlrpc":
+        raise BBMainException("FATAL: If '--remote-server' is defined, we must "
+                              "set the servertype as 'xmlrpc'.\n")
+
+    if configParams.observe_only and (not configParams.remote_server or configParams.bind):
+        raise BBMainException("FATAL: '--observe-only' can only be used by UI clients "
+                              "connecting to a server.\n")
+
+    if configParams.kill_server and not configParams.remote_server:
+        raise BBMainException("FATAL: '--kill-server' can only be used to terminate a remote server")
+
+    if "BBDEBUG" in os.environ:
+        level = int(os.environ["BBDEBUG"])
+        if level > configuration.debug:
+            configuration.debug = level
+
+    bb.msg.init_msgconfig(configParams.verbose, configuration.debug,
+                         configuration.debug_domains)
+
+    # Ensure logging messages get sent to the UI as events
+    handler = bb.event.LogHandler()
+    if not configParams.status_only:
+        # In status only mode there are no logs and no UI
+        logger.addHandler(handler)
+
+    # Clear away any spurious environment variables while we stoke up the cooker
+    cleanedvars = bb.utils.clean_environment()
+
+    featureset = []
+    if not configParams.server_only:
+        # Collect the feature set for the UI
+        featureset = getattr(ui_module, "featureSet", [])
+
+    if configParams.server_only:
+        for param in ('prefile', 'postfile'):
+            value = getattr(configParams, param)
+            if value:
+                setattr(configuration, "%s_server" % param, value)
+                param = "%s_server" % param
+
+    if not configParams.remote_server:
+        # we start a server with a given configuration
+        server = start_server(servermodule, configParams, configuration, featureset)
+        bb.event.ui_queue = []
+    else:
+        # we start a stub server that is actually a XMLRPClient that connects to a real server
+        server = servermodule.BitBakeXMLRPCClient(configParams.observe_only, configParams.xmlrpctoken)
+        server.saveConnectionDetails(configParams.remote_server)
+
+
+    if not configParams.server_only:
+        try:
+            server_connection = server.establishConnection(featureset)
+        except Exception as e:
+            bb.fatal("Could not connect to server %s: %s" % (configParams.remote_server, str(e)))
+
+        if configParams.kill_server:
+            server_connection.connection.terminateServer()
+            bb.event.ui_queue = []
+            return 0
+
+        server_connection.setupEventQueue()
+
+        # Restore the environment in case the UI needs it
+        for k in cleanedvars:
+            os.environ[k] = cleanedvars[k]
+
+        logger.removeHandler(handler)
+
+
+        if configParams.status_only:
+            server_connection.terminate()
+            return 0
+
+        try:
+            return ui_module.main(server_connection.connection, server_connection.events, configParams)
+        finally:
+            bb.event.ui_queue = []
+            server_connection.terminate()
+    else:
+        print("Bitbake server address: %s, server port: %s" % (server.serverImpl.host, server.serverImpl.port))
+        return 0
+
+    return 1
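
The option checks at the top of this hunk only admit a few combinations of --server-only, -B/--bind, --remote-server, --observe-only and --kill-server. A minimal sketch, using only the configParams fields visible above; the function name and error strings are invented for illustration and are not BitBake API:

    # Sketch only: mirrors the server-option constraints enforced above.
    def check_server_options(server_only, servertype, bind, remote_server,
                             observe_only, kill_server):
        errors = []
        if server_only:
            if servertype != "xmlrpc":
                errors.append("--server-only requires the xmlrpc servertype")
            if not bind:
                errors.append("--server-only requires a bind address (-B)")
            if remote_server:
                errors.append("--server-only conflicts with a remote server")
        if bind and servertype != "xmlrpc":
            errors.append("-B/--bind requires the xmlrpc servertype")
        if remote_server and servertype != "xmlrpc":
            errors.append("--remote-server requires the xmlrpc servertype")
        if observe_only and (not remote_server or bind):
            errors.append("--observe-only is only for UI clients connecting to a server")
        if kill_server and not remote_server:
            errors.append("--kill-server needs a remote server to terminate")
        return errors

    # A server-only run bound to an address passes; killing a server without
    # a remote server configured does not.
    assert not check_server_options(True, "xmlrpc", "localhost:0", None, False, False)
    assert check_server_options(False, "xmlrpc", None, None, False, True)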

+ 40 - 0
bitbake/lib/bb/methodpool.py

@@ -0,0 +1,40 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+#
+# Copyright (C)       2006 Holger Hans Peter Freyther
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+from bb.utils import better_compile, better_exec
+
+def insert_method(modulename, code, fn, lineno):
+    """
+    Add the code of a module to the method pool. The methods
+    are simply added; no checking is done.
+    """
+    comp = better_compile(code, modulename, fn, lineno=lineno)
+    better_exec(comp, None, code, fn)
+
+compilecache = {}
+
+def compile_cache(code):
+    h = hash(code)
+    if h in compilecache:
+        return compilecache[h]
+    return None
+
+def compile_cache_add(code, compileobj):
+    h = hash(code)
+    compilecache[h] = compileobj
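
methodpool keeps a single module-level cache keyed by hash(code). A minimal usage sketch, assuming the functions defined above; the helper name and the use of the builtin compile() (rather than bb.utils.better_compile) are illustrative only:

    # Sketch: memoize compiled code objects through the methodpool cache.
    from bb import methodpool

    def compile_once(code, filename="<bb-fragment>"):
        obj = methodpool.compile_cache(code)        # None on a cache miss
        if obj is None:
            obj = compile(code, filename, "exec")   # builtin compile, for the sketch
            methodpool.compile_cache_add(code, obj)
        return obj

    body = "def do_example():\n    return 42\n"
    assert compile_once(body) is compile_once(body)  # the second call hits the cache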

+ 263 - 0
bitbake/lib/bb/monitordisk.py

@@ -0,0 +1,263 @@
+#!/usr/bin/env python
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Copyright (C) 2012 Robert Yang
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os, logging, re, sys
+import bb
+logger = logging.getLogger("BitBake.Monitor")
+
+def printErr(info):
+    logger.error("%s\n       Disk space monitor will NOT be enabled" % info)
+
+def convertGMK(unit):
+
+    """ Convert the space unit G, M, K, the unit is case-insensitive """
+
+    unitG = re.match('([1-9][0-9]*)[gG]\s?$', unit)
+    if unitG:
+        return int(unitG.group(1)) * (1024 ** 3)
+    unitM = re.match('([1-9][0-9]*)[mM]\s?$', unit)
+    if unitM:
+        return int(unitM.group(1)) * (1024 ** 2)
+    unitK = re.match('([1-9][0-9]*)[kK]\s?$', unit)
+    if unitK:
+        return int(unitK.group(1)) * 1024
+    unitN = re.match('([1-9][0-9]*)\s?$', unit)
+    if unitN:
+        return int(unitN.group(1))
+    else:
+        return None
+
+def getMountedDev(path):
+
+    """ Get the device mounted at the path, uses /proc/mounts """
+
+    # Get the mount point of the filesystem containing path
+    # st_dev is the ID of device containing file
+    parentDev = os.stat(path).st_dev
+    currentDev = parentDev
+    # When the current directory's device is different from the
+    # parent's, then the current directory is a mount point
+    while parentDev == currentDev:
+        mountPoint = path
+        # Use dirname to get the parent's directory
+        path = os.path.dirname(path)
+        # Reach the "/"
+        if path == mountPoint:
+            break
+        parentDev = os.stat(path).st_dev
+
+    try:
+        with open("/proc/mounts", "r") as ifp:
+            for line in ifp:
+                procLines = line.rstrip('\n').split()
+                if procLines[1] == mountPoint:
+                    return procLines[0]
+    except EnvironmentError:
+        pass
+    return None
+
+def getDiskData(BBDirs, configuration):
+
+    """Prepare disk data for disk space monitor"""
+
+    # Save the device IDs, need the ID to be unique (the dictionary's key is
+    # unique), so that when more than one directory is located on the same
+    # device, we just monitor it once
+    devDict = {}
+    for pathSpaceInode in BBDirs.split():
+        # The input format is: "action,dir,space,inode"; dir is required,
+        # space and inode are optional
+        pathSpaceInodeRe = re.match('([^,]*),([^,]*),([^,]*),?(.*)', pathSpaceInode)
+        if not pathSpaceInodeRe:
+            printErr("Invalid value in BB_DISKMON_DIRS: %s" % pathSpaceInode)
+            return None
+
+        action = pathSpaceInodeRe.group(1)
+        if action not in ("ABORT", "STOPTASKS", "WARN"):
+            printErr("Unknown disk space monitor action: %s" % action)
+            return None
+
+        path = os.path.realpath(pathSpaceInodeRe.group(2))
+        if not path:
+            printErr("Invalid path value in BB_DISKMON_DIRS: %s" % pathSpaceInode)
+            return None
+
+        # The disk space or inode is optional, but it should have a correct
+        # value once it is specified
+        minSpace = pathSpaceInodeRe.group(3)
+        if minSpace:
+            minSpace = convertGMK(minSpace)
+            if not minSpace:
+                printErr("Invalid disk space value in BB_DISKMON_DIRS: %s" % pathSpaceInodeRe.group(3))
+                return None
+        else:
+            # None means that it is not specified
+            minSpace = None
+
+        minInode = pathSpaceInodeRe.group(4)
+        if minInode:
+            minInode = convertGMK(minInode)
+            if not minInode:
+                printErr("Invalid inode value in BB_DISKMON_DIRS: %s" % pathSpaceInodeRe.group(4))
+                return None
+        else:
+            # None means that it is not specified
+            minInode = None
+
+        if minSpace is None and minInode is None:
+            printErr("No disk space or inode value in found BB_DISKMON_DIRS: %s" % pathSpaceInode)
+            return None
+        # mkdir for the directory since it may not exist, for example the
+        # DL_DIR may not exist at the very beginning
+        if not os.path.exists(path):
+            bb.utils.mkdirhier(path)
+        dev = getMountedDev(path)
+        # Use path/action as the key
+        devDict[os.path.join(path, action)] = [dev, minSpace, minInode]
+
+    return devDict
+
+def getInterval(configuration):
+
+    """ Get the disk space interval """
+
+    # The default values are 50M and 5K.
+    spaceDefault = 50 * 1024 * 1024
+    inodeDefault = 5 * 1024
+
+    interval = configuration.getVar("BB_DISKMON_WARNINTERVAL", True)
+    if not interval:
+        return spaceDefault, inodeDefault
+    else:
+        # The disk space or inode interval is optional, but it should
+        # have a correct value once it is specified
+        intervalRe = re.match('([^,]*),?\s*(.*)', interval)
+        if intervalRe:
+            intervalSpace = intervalRe.group(1)
+            if intervalSpace:
+                intervalSpace = convertGMK(intervalSpace)
+                if not intervalSpace:
+                    printErr("Invalid disk space interval value in BB_DISKMON_WARNINTERVAL: %s" % intervalRe.group(1))
+                    return None, None
+            else:
+                intervalSpace = spaceDefault
+            intervalInode = intervalRe.group(2)
+            if intervalInode:
+                intervalInode = convertGMK(intervalInode)
+                if not intervalInode:
+                    printErr("Invalid disk inode interval value in BB_DISKMON_WARNINTERVAL: %s" % intervalRe.group(2))
+                    return None, None
+            else:
+                intervalInode = inodeDefault
+            return intervalSpace, intervalInode
+        else:
+            printErr("Invalid interval value in BB_DISKMON_WARNINTERVAL: %s" % interval)
+            return None, None
+
+class diskMonitor:
+
+    """Prepare the disk space monitor data"""
+
+    def __init__(self, configuration):
+
+        self.enableMonitor = False
+        self.configuration = configuration
+
+        BBDirs = configuration.getVar("BB_DISKMON_DIRS", True) or None
+        if BBDirs:
+            self.devDict = getDiskData(BBDirs, configuration)
+            if self.devDict:
+                self.spaceInterval, self.inodeInterval = getInterval(configuration)
+                if self.spaceInterval and self.inodeInterval:
+                    self.enableMonitor = True
+                    # These save the previous free disk space and inode counts;
+                    # we use them to avoid printing too many warning messages
+                    self.preFreeS = {}
+                    self.preFreeI = {}
+                    # This is for STOPTASKS and ABORT, to avoid printing the message
+                    # repeatedly while waiting for the tasks to finish
+                    self.checked = {}
+                    for k in self.devDict:
+                        self.preFreeS[k] = 0
+                        self.preFreeI[k] = 0
+                        self.checked[k] = False
+                    if self.spaceInterval is None and self.inodeInterval is None:
+                        self.enableMonitor = False
+
+    def check(self, rq):
+
+        """ Take action for the monitor """
+
+        if self.enableMonitor:
+            for k in self.devDict:
+                path = os.path.dirname(k)
+                action = os.path.basename(k)
+                dev = self.devDict[k][0]
+                minSpace = self.devDict[k][1]
+                minInode = self.devDict[k][2]
+
+                st = os.statvfs(path)
+
+                # The free space, in bytes
+                freeSpace = st.f_bavail * st.f_frsize
+
+                if minSpace and freeSpace < minSpace:
+                    # Always show warning, the self.checked would always be False if the action is WARN
+                    if self.preFreeS[k] == 0 or self.preFreeS[k] - freeSpace > self.spaceInterval and not self.checked[k]:
+                        logger.warn("The free space of %s (%s) is running low (%.3fGB left)" % \
+                                (path, dev, freeSpace / 1024 / 1024 / 1024.0))
+                        self.preFreeS[k] = freeSpace
+
+                    if action == "STOPTASKS" and not self.checked[k]:
+                        logger.error("No new tasks can be executed since the disk space monitor action is \"STOPTASKS\"!")
+                        self.checked[k] = True
+                        rq.finish_runqueue(False)
+                        bb.event.fire(bb.event.DiskFull(dev, 'disk', freeSpace, path), self.configuration)
+                    elif action == "ABORT" and not self.checked[k]:
+                        logger.error("Immediately abort since the disk space monitor action is \"ABORT\"!")
+                        self.checked[k] = True
+                        rq.finish_runqueue(True)
+                        bb.event.fire(bb.event.DiskFull(dev, 'disk', freeSpace, path), self.configuration)
+
+                # The number of free inodes
+                freeInode = st.f_favail
+
+                if minInode and freeInode < minInode:
+                    # Some filesystems use dynamic inodes so can't run out
+                    # (e.g. btrfs). This is reported by the inode count being 0.
+                    if st.f_files == 0:
+                        self.devDict[k][2] = None
+                        continue
+                    # Always show warning, the self.checked would always be False if the action is WARN
+                    if self.preFreeI[k] == 0 or self.preFreeI[k] - freeInode > self.inodeInterval and not self.checked[k]:
+                        logger.warn("The free inode of %s (%s) is running low (%.3fK left)" % \
+                                (path, dev, freeInode / 1024.0))
+                        self.preFreeI[k] = freeInode
+
+                    if action  == "STOPTASKS" and not self.checked[k]:
+                        logger.error("No new tasks can be executed since the disk space monitor action is \"STOPTASKS\"!")
+                        self.checked[k] = True
+                        rq.finish_runqueue(False)
+                        bb.event.fire(bb.event.DiskFull(dev, 'inode', freeInode, path), self.configuration)
+                    elif action  == "ABORT" and not self.checked[k]:
+                        logger.error("Immediately abort since the disk space monitor action is \"ABORT\"!")
+                        self.checked[k] = True
+                        rq.finish_runqueue(True)
+                        bb.event.fire(bb.event.DiskFull(dev, 'inode', freeInode, path), self.configuration)
+        return
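
For reference, getDiskData() above expects BB_DISKMON_DIRS to hold space-separated "action,dir[,space][,inode]" entries, and convertGMK() accepts case-insensitive G/M/K suffixes. A small sketch exercising the helpers; the example values are illustrative, not BitBake defaults:

    # Sketch: the unit conversion used for BB_DISKMON_DIRS and
    # BB_DISKMON_WARNINTERVAL values.
    from bb import monitordisk

    assert monitordisk.convertGMK("1G") == 1024 ** 3
    assert monitordisk.convertGMK("500m") == 500 * 1024 ** 2   # suffix is case-insensitive
    assert monitordisk.convertGMK("100K") == 100 * 1024
    assert monitordisk.convertGMK("2T") is None                # unknown suffixes are rejected

    # One entry per monitored directory; at least one of the space/inode
    # thresholds must be given, e.g.:
    #   BB_DISKMON_DIRS = "STOPTASKS,${TMPDIR},1G,100K ABORT,${DL_DIR},100M"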

+ 199 - 0
bitbake/lib/bb/msg.py

@@ -0,0 +1,199 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'msg' implementation
+
+Message handling infrastructure for bitbake
+
+"""
+
+# Copyright (C) 2006        Richard Purdie
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import sys
+import copy
+import logging
+import collections
+from itertools import groupby
+import warnings
+import bb
+import bb.event
+
+class BBLogFormatter(logging.Formatter):
+    """Formatter which ensures that our 'plain' messages (logging.INFO + 1) are used as is"""
+
+    DEBUG3 = logging.DEBUG - 2
+    DEBUG2 = logging.DEBUG - 1
+    DEBUG = logging.DEBUG
+    VERBOSE = logging.INFO - 1
+    NOTE = logging.INFO
+    PLAIN = logging.INFO + 1
+    ERROR = logging.ERROR
+    WARNING = logging.WARNING
+    CRITICAL = logging.CRITICAL
+
+    levelnames = {
+        DEBUG3  : 'DEBUG',
+        DEBUG2  : 'DEBUG',
+        DEBUG   : 'DEBUG',
+        VERBOSE : 'NOTE',
+        NOTE    : 'NOTE',
+        PLAIN   : '',
+        WARNING : 'WARNING',
+        ERROR   : 'ERROR',
+        CRITICAL: 'ERROR',
+    }
+
+    color_enabled = False
+    BASECOLOR, BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(29,38)
+
+    COLORS = {
+        DEBUG3  : CYAN,
+        DEBUG2  : CYAN,
+        DEBUG   : CYAN,
+        VERBOSE : BASECOLOR,
+        NOTE    : BASECOLOR,
+        PLAIN   : BASECOLOR,
+        WARNING : YELLOW,
+        ERROR   : RED,
+        CRITICAL: RED,
+    }
+
+    BLD = '\033[1;%dm'
+    STD = '\033[%dm'
+    RST = '\033[0m'
+
+    def getLevelName(self, levelno):
+        try:
+            return self.levelnames[levelno]
+        except KeyError:
+            self.levelnames[levelno] = value = 'Level %d' % levelno
+            return value
+
+    def format(self, record):
+        record.levelname = self.getLevelName(record.levelno)
+        if record.levelno == self.PLAIN:
+            msg = record.getMessage()
+        else:
+            if self.color_enabled:
+                record = self.colorize(record)
+            msg = logging.Formatter.format(self, record)
+
+        if hasattr(record, 'bb_exc_info'):
+            etype, value, tb = record.bb_exc_info
+            formatted = bb.exceptions.format_exception(etype, value, tb, limit=5)
+            msg += '\n' + ''.join(formatted)
+        return msg
+
+    def colorize(self, record):
+        color = self.COLORS[record.levelno]
+        if self.color_enabled and color is not None:
+            record = copy.copy(record)
+            record.levelname = "".join([self.BLD % color, record.levelname, self.RST])
+            record.msg = "".join([self.STD % color, record.msg, self.RST])
+        return record
+
+    def enable_color(self):
+        self.color_enabled = True
+
+class BBLogFilter(object):
+    def __init__(self, handler, level, debug_domains):
+        self.stdlevel = level
+        self.debug_domains = debug_domains
+        loglevel = level
+        for domain in debug_domains:
+            if debug_domains[domain] < loglevel:
+                loglevel = debug_domains[domain]
+        handler.setLevel(loglevel)
+        handler.addFilter(self)
+
+    def filter(self, record):
+        if record.levelno >= self.stdlevel:
+            return True
+        if record.name in self.debug_domains and record.levelno >= self.debug_domains[record.name]:
+            return True
+        return False
+
+class BBLogFilterStdErr(BBLogFilter):
+    def filter(self, record):
+        if not BBLogFilter.filter(self, record):
+            return False
+        if record.levelno >= logging.ERROR:
+            return True
+        return False
+
+class BBLogFilterStdOut(BBLogFilter):
+    def filter(self, record):
+        if not BBLogFilter.filter(self, record):
+            return False
+        if record.levelno < logging.ERROR:
+            return True
+        return False
+
+# Message control functions
+#
+
+loggerDefaultDebugLevel = 0
+loggerDefaultVerbose = False
+loggerVerboseLogs = False
+loggerDefaultDomains = []
+
+def init_msgconfig(verbose, debug, debug_domains=None):
+    """
+    Set the default verbosity and debug levels and configure the logger
+    """
+    bb.msg.loggerDefaultDebugLevel = debug
+    bb.msg.loggerDefaultVerbose = verbose
+    if verbose:
+        bb.msg.loggerVerboseLogs = True
+    if debug_domains:
+        bb.msg.loggerDefaultDomains = debug_domains
+    else:
+        bb.msg.loggerDefaultDomains = []
+
+def constructLogOptions():
+    debug = loggerDefaultDebugLevel
+    verbose = loggerDefaultVerbose
+    domains = loggerDefaultDomains
+
+    if debug:
+        level = BBLogFormatter.DEBUG - debug + 1
+    elif verbose:
+        level = BBLogFormatter.VERBOSE
+    else:
+        level = BBLogFormatter.NOTE
+
+    debug_domains = {}
+    for (domainarg, iterator) in groupby(domains):
+        dlevel = len(tuple(iterator))
+        debug_domains["BitBake.%s" % domainarg] = logging.DEBUG - dlevel + 1
+    return level, debug_domains
+
+def addDefaultlogFilter(handler, cls = BBLogFilter):
+    level, debug_domains = constructLogOptions()
+
+    cls(handler, level, debug_domains)
+
+#
+# Message handling functions
+#
+
+def fatal(msgdomain, msg):
+    if msgdomain:
+        logger = logging.getLogger("BitBake.%s" % msgdomain)
+    else:
+        logger = logging.getLogger("BitBake")
+    logger.critical(msg)
+    sys.exit(1)
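
For illustration, the debug-domain handling in constructLogOptions() above maps each domain to a logger level, and repeating a domain name deepens its level because groupby() counts consecutive duplicates. A standalone sketch of that mapping; the domain names are hypothetical:

    # Sketch: replicate the groupby() loop from constructLogOptions() on a
    # hypothetical domains list, as init_msgconfig() would have stored it.
    import logging
    from itertools import groupby

    domains = ["Fetcher", "Fetcher", "Cache"]
    debug_domains = {}
    for domainarg, iterator in groupby(domains):
        dlevel = len(tuple(iterator))
        debug_domains["BitBake.%s" % domainarg] = logging.DEBUG - dlevel + 1

    print(debug_domains)
    # {'BitBake.Fetcher': 9, 'BitBake.Cache': 10} -- lower values are more verbose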

+ 255 - 0
bitbake/lib/bb/namedtuple_with_abc.py

@@ -0,0 +1,255 @@
+# http://code.activestate.com/recipes/577629-namedtupleabc-abstract-base-class-mix-in-for-named/
+#!/usr/bin/env python
+# Copyright (c) 2011 Jan Kaliszewski (zuo). Available under the MIT License.
+
+"""
+namedtuple_with_abc.py:
+* named tuple mix-in + ABC (abstract base class) recipe,
+* works under Python 2.6, 2.7 as well as 3.x.
+
+Import this module to patch collections.namedtuple() factory function
+-- enriching it with the 'abc' attribute (an abstract base class + mix-in
+for named tuples) and decorating it with a wrapper that registers each
+newly created named tuple as a subclass of namedtuple.abc.
+
+How to import:
+    import collections, namedtuple_with_abc
+or:
+    import namedtuple_with_abc
+    from collections import namedtuple
+    # ^ in this variant you must import namedtuple function
+    #   *after* importing namedtuple_with_abc module
+or simply:
+    from namedtuple_with_abc import namedtuple
+
+Simple usage example:
+    class Credentials(namedtuple.abc):
+        _fields = 'username password'
+        def __str__(self):
+            return ('{0.__class__.__name__}'
+                    '(username={0.username}, password=...)'.format(self))
+    print(Credentials("alice", "Alice's password"))
+
+For more advanced examples -- see below the "if __name__ == '__main__':".
+"""
+
+import collections
+from abc import ABCMeta, abstractproperty
+from functools import wraps
+from sys import version_info
+
+__all__ = ('namedtuple',)
+_namedtuple = collections.namedtuple
+
+
+class _NamedTupleABCMeta(ABCMeta):
+    '''The metaclass for the abstract base class + mix-in for named tuples.'''
+    def __new__(mcls, name, bases, namespace):
+        fields = namespace.get('_fields')
+        for base in bases:
+            if fields is not None:
+                break
+            fields = getattr(base, '_fields', None)
+        if not isinstance(fields, abstractproperty):
+            basetuple = _namedtuple(name, fields)
+            bases = (basetuple,) + bases
+            namespace.pop('_fields', None)
+            namespace.setdefault('__doc__', basetuple.__doc__)
+            namespace.setdefault('__slots__', ())
+        return ABCMeta.__new__(mcls, name, bases, namespace)
+
+
+exec(
+    # Python 2.x metaclass declaration syntax
+    """class _NamedTupleABC(object):
+        '''The abstract base class + mix-in for named tuples.'''
+        __metaclass__ = _NamedTupleABCMeta
+        _fields = abstractproperty()""" if version_info[0] < 3 else
+    # Python 3.x metaclass declaration syntax
+    """class _NamedTupleABC(metaclass=_NamedTupleABCMeta):
+        '''The abstract base class + mix-in for named tuples.'''
+        _fields = abstractproperty()"""
+)
+
+
+_namedtuple.abc = _NamedTupleABC
+#_NamedTupleABC.register(type(version_info))  # (and similar, in the future...)
+
+@wraps(_namedtuple)
+def namedtuple(*args, **kwargs):
+    '''Named tuple factory with namedtuple.abc subclass registration.'''
+    cls = _namedtuple(*args, **kwargs)
+    _NamedTupleABC.register(cls)
+    return cls
+
+collections.namedtuple = namedtuple
+
+
+
+
+if __name__ == '__main__':
+
+    '''Examples and explanations'''
+
+    # Simple usage
+
+    class MyRecord(namedtuple.abc):
+        _fields = 'x y z'  # such form will be transformed into ('x', 'y', 'z')
+        def _my_custom_method(self):
+            return list(self._asdict().items())
+    # (the '_fields' attribute belongs to the named tuple public API anyway)
+
+    rec = MyRecord(1, 2, 3)
+    print(rec)
+    print(rec._my_custom_method())
+    print(rec._replace(y=222))
+    print(rec._replace(y=222)._my_custom_method())
+
+    # Custom abstract classes...
+
+    class MyAbstractRecord(namedtuple.abc):
+        def _my_custom_method(self):
+            return list(self._asdict().items())
+
+    try:
+        MyAbstractRecord()  # (abstract classes cannot be instantiated)
+    except TypeError as exc:
+        print(exc)
+
+    class AnotherAbstractRecord(MyAbstractRecord):
+        def __str__(self):
+            return '<<<{0}>>>'.format(super(AnotherAbstractRecord,
+                                            self).__str__())
+
+    # ...and their non-abstract subclasses
+
+    class MyRecord2(MyAbstractRecord):
+        _fields = 'a, b'
+
+    class MyRecord3(AnotherAbstractRecord):
+        _fields = 'p', 'q', 'r'
+
+    rec2 = MyRecord2('foo', 'bar')
+    print(rec2)
+    print(rec2._my_custom_method())
+    print(rec2._replace(b=222))
+    print(rec2._replace(b=222)._my_custom_method())
+
+    rec3 = MyRecord3('foo', 'bar', 'baz')
+    print(rec3)
+    print(rec3._my_custom_method())
+    print(rec3._replace(q=222))
+    print(rec3._replace(q=222)._my_custom_method())
+
+    # You can also subclass non-abstract ones...
+
+    class MyRecord33(MyRecord3):
+        def __str__(self):
+            return '< {0!r}, ..., {1!r} >'.format(self.p, self.r)
+
+    rec33 = MyRecord33('foo', 'bar', 'baz')
+    print(rec33)
+    print(rec33._my_custom_method())
+    print(rec33._replace(q=222))
+    print(rec33._replace(q=222)._my_custom_method())
+
+    # ...and even override the magic '_fields' attribute again
+
+    class MyRecord345(MyRecord3):
+        _fields = 'e f g h i j k'
+
+    rec345 = MyRecord345(1, 2, 3, 4, 3, 2, 1)
+    print(rec345)
+    print(rec345._my_custom_method())
+    print(rec345._replace(f=222))
+    print(rec345._replace(f=222)._my_custom_method())
+
+    # Mixing-in some other classes is also possible:
+
+    class MyMixIn(object):
+        def method(self):
+            return "MyMixIn.method() called"
+        def _my_custom_method(self):
+            return "MyMixIn._my_custom_method() called"
+        def count(self, item):
+            return "MyMixIn.count({0}) called".format(item)
+        def _asdict(self):  # (cannot override a namedtuple method, see below)
+            return "MyMixIn._asdict() called"
+
+    class MyRecord4(MyRecord33, MyMixIn):  # mix-in on the right
+        _fields = 'j k l x'
+
+    class MyRecord5(MyMixIn, MyRecord33):  # mix-in on the left
+        _fields = 'j k l x y'
+
+    rec4 = MyRecord4(1, 2, 3, 2)
+    print(rec4)
+    print(rec4.method())
+    print(rec4._my_custom_method())  # MyRecord33's
+    print(rec4.count(2))  # tuple's
+    print(rec4._replace(k=222))
+    print(rec4._replace(k=222).method())
+    print(rec4._replace(k=222)._my_custom_method())  # MyRecord33's
+    print(rec4._replace(k=222).count(8))  # tuple's
+
+    rec5 = MyRecord5(1, 2, 3, 2, 1)
+    print(rec5)
+    print(rec5.method())
+    print(rec5._my_custom_method())  # MyMixIn's
+    print(rec5.count(2))  # MyMixIn's
+    print(rec5._replace(k=222))
+    print(rec5._replace(k=222).method())
+    print(rec5._replace(k=222)._my_custom_method())  # MyMixIn's
+    print(rec5._replace(k=222).count(2))  # MyMixIn's
+
+    # Note this behavior: the standard namedtuple methods cannot be
+    # overridden by a foreign mix-in -- even if the mix-in is declared
+    # as the leftmost base class (but, obviously, you can override them
+    # in the defined class or its subclasses):
+
+    print(rec4._asdict())  # (returns a dict, not "MyMixIn._asdict() called")
+    print(rec5._asdict())  # (returns a dict, not "MyMixIn._asdict() called")
+
+    class MyRecord6(MyRecord33):
+        _fields = 'j k l x y z'
+        def _asdict(self):
+            return "MyRecord6._asdict() called"
+    rec6 = MyRecord6(1, 2, 3, 1, 2, 3)
+    print(rec6._asdict())  # (this returns "MyRecord6._asdict() called")
+
+    # All of these record classes are real subclasses of namedtuple.abc:
+
+    assert issubclass(MyRecord, namedtuple.abc)
+    assert issubclass(MyAbstractRecord, namedtuple.abc)
+    assert issubclass(AnotherAbstractRecord, namedtuple.abc)
+    assert issubclass(MyRecord2, namedtuple.abc)
+    assert issubclass(MyRecord3, namedtuple.abc)
+    assert issubclass(MyRecord33, namedtuple.abc)
+    assert issubclass(MyRecord345, namedtuple.abc)
+    assert issubclass(MyRecord4, namedtuple.abc)
+    assert issubclass(MyRecord5, namedtuple.abc)
+    assert issubclass(MyRecord6, namedtuple.abc)
+
+    # ...but abstract ones are not subclasses of tuple
+    # (and this is what you probably want):
+
+    assert not issubclass(MyAbstractRecord, tuple)
+    assert not issubclass(AnotherAbstractRecord, tuple)
+
+    assert issubclass(MyRecord, tuple)
+    assert issubclass(MyRecord2, tuple)
+    assert issubclass(MyRecord3, tuple)
+    assert issubclass(MyRecord33, tuple)
+    assert issubclass(MyRecord345, tuple)
+    assert issubclass(MyRecord4, tuple)
+    assert issubclass(MyRecord5, tuple)
+    assert issubclass(MyRecord6, tuple)
+
+    # Named tuple classes created with namedtuple() factory function
+    # (in the "traditional" way) are registered as "virtual" subclasses
+    # of namedtuple.abc:
+
+    MyTuple = namedtuple('MyTuple', 'a b c')
+    mt = MyTuple(1, 2, 3)
+    assert issubclass(MyTuple, namedtuple.abc)
+    assert isinstance(mt, namedtuple.abc)

Some files were not shown because too many files changed in this diff