
bitbake: update to version 1.44.0

Found that the gitsm fetcher we ship is rather outdated compared to
upstream and was failing to pull git submodules from repositories
such as azure-umqtt-c.
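
For illustration, a recipe fetching such a repository relies on the
gitsm fetcher via its SRC_URI; the exact URL and branch below are
examples only, not taken from an actual recipe:

    SRC_URI = "gitsm://github.com/Azure/azure-umqtt-c.git;protocol=https;branch=master"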

Note that bitbake has renamed its multiconfig build targets: the
"multiconfig:" prefix was changed to "mc:". Our scripts and
documentation have been updated accordingly.
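
For example, a multiconfig build is now invoked as follows (machine
and image names taken from the RECIPE-API-CHANGELOG entry below):

    bitbake mc:qemuarm-stretch:isar-image-base
    # previously: bitbake multiconfig:qemuarm-stretch:isar-image-base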

    Origin: https://github.com/openembedded/bitbake.git
    Commit 5d83d828cacb58ccb7c464e799c85fd2d2a50ccc (tag: 1.44.0)
    Author: Richard Purdie <richard.purdie@linuxfoundation.org>
    Date:   Wed Oct 9 14:10:21 2019 +0100

    bitbake: Update to version 1.44.0

    Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>

Signed-off-by: Cedric Hombourger <Cedric_Hombourger@mentor.com>
commit c2aae24527
100 changed files with 2772 additions and 4773 deletions
  1. 16 0
      RECIPE-API-CHANGELOG.md
  2. 2 0
      bitbake/.gitattributes
  3. 0 19
      bitbake/HEADER
  4. 11 1
      bitbake/LICENSE
  5. 5 56
      bitbake/LICENSE.GPL-2.0-only
  6. 25 0
      bitbake/LICENSE.MIT
  7. 3 2
      bitbake/MANIFEST.in
  8. 2 14
      bitbake/bin/bitbake
  9. 85 70
      bitbake/bin/bitbake-diffsigs
  10. 0 94
      bitbake/bin/bitbake-dumpsig
  11. 1 0
      bitbake/bin/bitbake-dumpsig
  12. 170 0
      bitbake/bin/bitbake-hashclient
  13. 62 0
      bitbake/bin/bitbake-hashserv
  14. 1 11
      bitbake/bin/bitbake-layers
  15. 4 0
      bitbake/bin/bitbake-prserv
  16. 5 11
      bitbake/bin/bitbake-selftest
  17. 15 4
      bitbake/bin/bitbake-worker
  18. 1 13
      bitbake/bin/bitdoc
  19. 4 0
      bitbake/bin/git-make-shallow
  20. 1 11
      bitbake/bin/toaster
  21. 2 15
      bitbake/bin/toaster-eventreplay
  22. 1 1
      bitbake/classes/base.bbclass
  23. 0 2
      bitbake/contrib/dump_cache.py
  24. 39 39
      bitbake/doc/bitbake-user-manual/bitbake-user-manual-execution.xml
  25. 40 29
      bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.xml
  26. 14 14
      bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.xml
  27. 9 9
      bitbake/doc/bitbake-user-manual/bitbake-user-manual-intro.xml
  28. 126 53
      bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.xml
  29. 137 137
      bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.xml
  30. 0 8
      bitbake/doc/poky.ent
  31. 0 15
      bitbake/lib/bb/COW.py
  32. 2 14
      bitbake/lib/bb/__init__.py
  33. 78 73
      bitbake/lib/bb/build.py
  34. 78 31
      bitbake/lib/bb/cache.py
  35. 1 13
      bitbake/lib/bb/cache_extra.py
  36. 1 11
      bitbake/lib/bb/checksum.py
  37. 9 4
      bitbake/lib/bb/codeparser.py
  38. 1 11
      bitbake/lib/bb/command.py
  39. 4 0
      bitbake/lib/bb/compat.py
  40. 111 107
      bitbake/lib/bb/cooker.py
  41. 25 19
      bitbake/lib/bb/cookerdata.py
  42. 4 0
      bitbake/lib/bb/daemonize.py
  43. 4 19
      bitbake/lib/bb/data.py
  44. 7 18
      bitbake/lib/bb/data_smart.py
  45. 3 30
      bitbake/lib/bb/event.py
  46. 3 0
      bitbake/lib/bb/exceptions.py
  47. 26 29
      bitbake/lib/bb/fetch2/__init__.py
  48. 1 11
      bitbake/lib/bb/fetch2/bzr.py
  49. 4 15
      bitbake/lib/bb/fetch2/clearcase.py
  50. 2 15
      bitbake/lib/bb/fetch2/cvs.py
  51. 47 21
      bitbake/lib/bb/fetch2/git.py
  52. 1 13
      bitbake/lib/bb/fetch2/gitannex.py
  53. 123 158
      bitbake/lib/bb/fetch2/gitsm.py
  54. 3 15
      bitbake/lib/bb/fetch2/hg.py
  55. 2 14
      bitbake/lib/bb/fetch2/local.py
  56. 31 38
      bitbake/lib/bb/fetch2/npm.py
  57. 3 2
      bitbake/lib/bb/fetch2/osc.py
  58. 1 14
      bitbake/lib/bb/fetch2/perforce.py
  59. 2 14
      bitbake/lib/bb/fetch2/repo.py
  60. 1 14
      bitbake/lib/bb/fetch2/s3.py
  61. 1 14
      bitbake/lib/bb/fetch2/sftp.py
  62. 1 13
      bitbake/lib/bb/fetch2/ssh.py
  63. 56 42
      bitbake/lib/bb/fetch2/svn.py
  64. 43 61
      bitbake/lib/bb/fetch2/wget.py
  65. 15 26
      bitbake/lib/bb/main.py
  66. 1 14
      bitbake/lib/bb/methodpool.py
  67. 7 20
      bitbake/lib/bb/monitordisk.py
  68. 1 13
      bitbake/lib/bb/msg.py
  69. 3 1
      bitbake/lib/bb/namedtuple_with_abc.py
  70. 2 12
      bitbake/lib/bb/parse/__init__.py
  71. 1 14
      bitbake/lib/bb/parse/ast.py
  72. 36 23
      bitbake/lib/bb/parse/parse_py/BBHandler.py
  73. 2 15
      bitbake/lib/bb/parse/parse_py/ConfHandler.py
  74. 2 15
      bitbake/lib/bb/parse/parse_py/__init__.py
  75. 159 75
      bitbake/lib/bb/persist_data.py
  76. 4 0
      bitbake/lib/bb/process.py
  77. 16 12
      bitbake/lib/bb/progress.py
  78. 3 15
      bitbake/lib/bb/providers.py
  79. 0 710
      bitbake/lib/bb/pysh/builtin.py
  80. 0 1367
      bitbake/lib/bb/pysh/interp.py
  81. 0 116
      bitbake/lib/bb/pysh/lsprof.py
  82. 0 167
      bitbake/lib/bb/pysh/pysh.py
  83. 0 5
      bitbake/lib/bb/pysh/pyshlex.py
  84. 10 7
      bitbake/lib/bb/pysh/pyshyacc.py
  85. 0 26
      bitbake/lib/bb/pysh/sherrors.py
  86. 0 77
      bitbake/lib/bb/pysh/subprocess_fix.py
  87. 1 11
      bitbake/lib/bb/remotedata.py
  88. 364 284
      bitbake/lib/bb/runqueue.py
  89. 1 13
      bitbake/lib/bb/server/__init__.py
  90. 77 46
      bitbake/lib/bb/server/process.py
  91. 1 11
      bitbake/lib/bb/server/xmlrpcclient.py
  92. 1 11
      bitbake/lib/bb/server/xmlrpcserver.py
  93. 281 69
      bitbake/lib/bb/siggen.py
  94. 2 15
      bitbake/lib/bb/taskdata.py
  95. 8 14
      bitbake/lib/bb/tests/codeparser.py
  96. 1 14
      bitbake/lib/bb/tests/cooker.py
  97. 2 15
      bitbake/lib/bb/tests/cow.py
  98. 24 15
      bitbake/lib/bb/tests/data.py
  99. 1 22
      bitbake/lib/bb/tests/event.py
  100. 286 22
      bitbake/lib/bb/tests/fetch.py

+ 16 - 0
RECIPE-API-CHANGELOG.md

@@ -183,6 +183,7 @@ Otherwise set a encrypted root password like this:
 USERS += "root"
 USER_root[password] = "$6$rounds=10000$RXeWrnFmkY$DtuS/OmsAS2cCEDo0BF5qQsizIrq6jPgXnwv3PHqREJeKd1sXdHX/ayQtuQWVDHe0KIO0/sVH8dvQm1KthF0d/"
 ```
+
 ### Use FILESEXTRAPATHS to add custom paths to FILESPATH
 
 Direct modification of FILESPATH variable is discouraged. Use FILESEXTRAPATHS
@@ -192,4 +193,19 @@ files and patches using bbappend a lot easier.
 For example:
 ```
 FILESEXTRAPATHS_prepend := "$THISDIR/files:"
+
+### multiconfig build targets were renamed
+
+bitbake was upgraded to version 1.44.0 where "multiconfig" build targets were
+renamed "mc". As an example, builds for the qemuarm-stretch machine should now
+be done as follows:
+
+```
+bitbake mc:qemuarm-stretch:isar-image-base
+```
+
+The old syntax is no longer supported and will produce an error:
+
+```
+bitbake multiconfig:qemuarm-stretch:isar-image-base
 ```

+ 2 - 0
bitbake/.gitattributes

@@ -0,0 +1,2 @@
+*min.js binary
+*min.css binary

+ 0 - 19
bitbake/HEADER

@@ -1,19 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# <one line to give the program's name and a brief idea of what it does.>
-# Copyright (C) <year>  <name of author>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-

+ 11 - 1
bitbake/LICENSE

@@ -1,4 +1,13 @@
-BitBake is licensed under the GNU General Public License version 2.0. See COPYING for further details.
+BitBake is licensed under the GNU General Public License version 2.0. See 
+LICENSE.GPL-2.0-only for further details.
+
+Individual files contain the following style tags instead of the full license text:
+
+    SPDX-License-Identifier:	GPL-2.0-only
+
+This enables machine processing of license information based on the SPDX
+License Identifiers that are here available: http://spdx.org/licenses/
+
 
 The following external components are distributed with this software:
 
@@ -17,3 +26,4 @@ Foundation and individual contributors.
 * Font Awesome fonts redistributed under the SIL Open Font License 1.1
 
 * simplediff is distributed under the zlib license.
+

+ 5 - 56
bitbake/COPYING → bitbake/LICENSE.GPL-2.0-only

@@ -279,61 +279,10 @@ POSSIBILITY OF SUCH DAMAGES.
 
 		     END OF TERMS AND CONDITIONS
 
-	    How to Apply These Terms to Your New Programs
+Note:
+Individual files contain the following tag instead of the full license text.
 
-  If you develop a new program, and you want it to be of the greatest
-possible use to the public, the best way to achieve this is to make it
-free software which everyone can redistribute and change under these terms.
+    SPDX-License-Identifier: GPL-2.0-only
 
-  To do so, attach the following notices to the program.  It is safest
-to attach them to the start of each source file to most effectively
-convey the exclusion of warranty; and each file should have at least
-the "copyright" line and a pointer to where the full notice is found.
-
-    <one line to give the program's name and a brief idea of what it does.>
-    Copyright (C) <year>  <name of author>
-
-    This program is free software; you can redistribute it and/or modify
-    it under the terms of the GNU General Public License as published by
-    the Free Software Foundation; either version 2 of the License, or
-    (at your option) any later version.
-
-    This program is distributed in the hope that it will be useful,
-    but WITHOUT ANY WARRANTY; without even the implied warranty of
-    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-    GNU General Public License for more details.
-
-    You should have received a copy of the GNU General Public License along
-    with this program; if not, write to the Free Software Foundation, Inc.,
-    51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-Also add information on how to contact you by electronic and paper mail.
-
-If the program is interactive, make it output a short notice like this
-when it starts in an interactive mode:
-
-    Gnomovision version 69, Copyright (C) year name of author
-    Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
-    This is free software, and you are welcome to redistribute it
-    under certain conditions; type `show c' for details.
-
-The hypothetical commands `show w' and `show c' should show the appropriate
-parts of the General Public License.  Of course, the commands you use may
-be called something other than `show w' and `show c'; they could even be
-mouse-clicks or menu items--whatever suits your program.
-
-You should also get your employer (if you work as a programmer) or your
-school, if any, to sign a "copyright disclaimer" for the program, if
-necessary.  Here is a sample; alter the names:
-
-  Yoyodyne, Inc., hereby disclaims all copyright interest in the program
-  `Gnomovision' (which makes passes at compilers) written by James Hacker.
-
-  <signature of Ty Coon>, 1 April 1989
-  Ty Coon, President of Vice
-
-This General Public License does not permit incorporating your program into
-proprietary programs.  If your program is a subroutine library, you may
-consider it more useful to permit linking proprietary applications with the
-library.  If this is what you want to do, use the GNU Lesser General
-Public License instead of this License.
+This enables machine processing of license information based on the SPDX
+License Identifiers that are here available: http://spdx.org/licenses/

+ 25 - 0
bitbake/LICENSE.MIT

@@ -0,0 +1,25 @@
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+Note:
+Individual files contain the following tag instead of the full license text.
+
+    SPDX-License-Identifier: MIT
+
+This enables machine processing of license information based on the SPDX
+License Identifiers that are here available: http://spdx.org/licenses/

+ 3 - 2
bitbake/MANIFEST.in

@@ -1,6 +1,8 @@
-include COPYING
 include ChangeLog
 include AUTHORS
+include LICENSE
+include LICENSE.GPL-2.0-only
+include LICENSE.MIT
 include contrib/*
 include contrib/vim/*/*
 include conf/*
@@ -8,4 +10,3 @@ include classes/*
 include doc/*
 include doc/manual/*
 include ez_setup.py
-include HEADER

+ 2 - 14
bitbake/bin/bitbake

@@ -1,6 +1,4 @@
 #!/usr/bin/env python3
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 #
 # Copyright (C) 2003, 2004  Chris Larson
 # Copyright (C) 2003, 2004  Phil Blundell
@@ -9,18 +7,8 @@
 # Copyright (C) 2005        ROAD GmbH
 # Copyright (C) 2006        Richard Purdie
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 import os
 import sys
@@ -38,7 +26,7 @@ from bb.main import bitbake_main, BitBakeConfigParameters, BBMainException
 if sys.getfilesystemencoding() != "utf-8":
     sys.exit("Please use a locale setting which supports UTF-8 (such as LANG=en_US.UTF-8).\nPython can't change the filesystem locale after loading so we need a UTF-8 when Python starts or things won't work.")
 
-__version__ = "1.40.0"
+__version__ = "1.44.0"
 
 if __name__ == "__main__":
     if __version__ != bb.__version__:

+ 85 - 70
bitbake/bin/bitbake-diffsigs

@@ -1,27 +1,16 @@
 #!/usr/bin/env python3
 
-# bitbake-diffsigs
-# BitBake task signature data comparison utility
+# bitbake-diffsigs / bitbake-dumpsig
+# BitBake task signature data dump and comparison utility
 #
 # Copyright (C) 2012-2013, 2017 Intel Corporation
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 import os
 import sys
 import warnings
-import fnmatch
 import argparse
 import logging
 import pickle
@@ -32,7 +21,10 @@ import bb.tinfoil
 import bb.siggen
 import bb.msg
 
-logger = bb.msg.logger_create('bitbake-diffsigs')
+myname = os.path.basename(sys.argv[0])
+logger = bb.msg.logger_create(myname)
+
+is_dump = myname == 'bitbake-dumpsig'
 
 def find_siginfo(tinfoil, pn, taskname, sigs=None):
     result = None
@@ -59,8 +51,8 @@ def find_siginfo(tinfoil, pn, taskname, sigs=None):
         sys.exit(2)
     return result
 
-def find_compare_task(bbhandler, pn, taskname, sig1=None, sig2=None, color=False):
-    """ Find the most recent signature files for the specified PN/task and compare them """
+def find_siginfo_task(bbhandler, pn, taskname, sig1=None, sig2=None):
+    """ Find the most recent signature files for the specified PN/task """
 
     if not taskname.startswith('do_'):
         taskname = 'do_%s' % taskname
@@ -79,73 +71,81 @@ def find_compare_task(bbhandler, pn, taskname, sig1=None, sig2=None, color=False
         latestfiles = [sigfiles[sig1], sigfiles[sig2]]
     else:
         filedates = find_siginfo(bbhandler, pn, taskname)
-        latestfiles = sorted(filedates.keys(), key=lambda f: filedates[f])[-3:]
+        latestfiles = sorted(filedates.keys(), key=lambda f: filedates[f])[-2:]
         if not latestfiles:
             logger.error('No sigdata files found matching %s %s' % (pn, taskname))
             sys.exit(1)
-        elif len(latestfiles) < 2:
-            logger.error('Only one matching sigdata file found for the specified task (%s %s)' % (pn, taskname))
-            sys.exit(1)
 
-    # Define recursion callback
-    def recursecb(key, hash1, hash2):
-        hashes = [hash1, hash2]
-        hashfiles = find_siginfo(bbhandler, key, None, hashes)
-
-        recout = []
-        if len(hashfiles) == 0:
-            recout.append("Unable to find matching sigdata for %s with hashes %s or %s" % (key, hash1, hash2))
-        elif not hash1 in hashfiles:
-            recout.append("Unable to find matching sigdata for %s with hash %s" % (key, hash1))
-        elif not hash2 in hashfiles:
-            recout.append("Unable to find matching sigdata for %s with hash %s" % (key, hash2))
-        else:
-            out2 = bb.siggen.compare_sigfiles(hashfiles[hash1], hashfiles[hash2], recursecb, color=color)
-            for change in out2:
-                for line in change.splitlines():
-                    recout.append('  ' + line)
+    return latestfiles
+
 
-        return recout
+# Define recursion callback
+def recursecb(key, hash1, hash2):
+    hashes = [hash1, hash2]
+    hashfiles = find_siginfo(tinfoil, key, None, hashes)
 
-    # Recurse into signature comparison
-    logger.debug("Signature file (previous): %s" % latestfiles[-2])
-    logger.debug("Signature file (latest): %s" % latestfiles[-1])
-    output = bb.siggen.compare_sigfiles(latestfiles[-2], latestfiles[-1], recursecb, color=color)
-    if output:
-        print('\n'.join(output))
-    sys.exit(0)
+    recout = []
+    if len(hashfiles) == 0:
+        recout.append("Unable to find matching sigdata for %s with hashes %s or %s" % (key, hash1, hash2))
+    elif not hash1 in hashfiles:
+        recout.append("Unable to find matching sigdata for %s with hash %s" % (key, hash1))
+    elif not hash2 in hashfiles:
+        recout.append("Unable to find matching sigdata for %s with hash %s" % (key, hash2))
+    else:
+        out2 = bb.siggen.compare_sigfiles(hashfiles[hash1], hashfiles[hash2], recursecb, color=color)
+        for change in out2:
+            for line in change.splitlines():
+                recout.append('    ' + line)
 
+    return recout
 
 
 parser = argparse.ArgumentParser(
-    description="Compares siginfo/sigdata files written out by BitBake")
+    description=("Dumps" if is_dump else "Compares") + " siginfo/sigdata files written out by BitBake")
 
-parser.add_argument('-d', '--debug',
+parser.add_argument('-D', '--debug',
                     help='Enable debug output',
                     action='store_true')
 
-parser.add_argument('--color',
-        help='Colorize output (where %(metavar)s is %(choices)s)',
-        choices=['auto', 'always', 'never'], default='auto', metavar='color')
+if is_dump:
+    parser.add_argument("-t", "--task",
+            help="find the signature data file for the last run of the specified task",
+            action="store", dest="taskargs", nargs=2, metavar=('recipename', 'taskname'))
+
+    parser.add_argument("sigdatafile1",
+            help="Signature file to dump. Not used when using -t/--task.",
+            action="store", nargs='?', metavar="sigdatafile")
+else:
+    parser.add_argument('-c', '--color',
+            help='Colorize the output (where %(metavar)s is %(choices)s)',
+            choices=['auto', 'always', 'never'], default='auto', metavar='color')
 
-parser.add_argument("-t", "--task",
-        help="find the signature data files for last two runs of the specified task and compare them",
-        action="store", dest="taskargs", nargs=2, metavar=('recipename', 'taskname'))
+    parser.add_argument('-d', '--dump',
+            help='Dump the last signature data instead of comparing (equivalent to using bitbake-dumpsig)',
+            action='store_true')
 
-parser.add_argument("-s", "--signature",
-        help="With -t/--task, specify the signatures to look for instead of taking the last two",
-        action="store", dest="sigargs", nargs=2, metavar=('fromsig', 'tosig'))
+    parser.add_argument("-t", "--task",
+            help="find the signature data files for the last two runs of the specified task and compare them",
+            action="store", dest="taskargs", nargs=2, metavar=('recipename', 'taskname'))
 
-parser.add_argument("sigdatafile1",
-        help="First signature file to compare (or signature file to dump, if second not specified). Not used when using -t/--task.",
-        action="store", nargs='?')
+    parser.add_argument("-s", "--signature",
+            help="With -t/--task, specify the signatures to look for instead of taking the last two",
+            action="store", dest="sigargs", nargs=2, metavar=('fromsig', 'tosig'))
 
-parser.add_argument("sigdatafile2",
-        help="Second signature file to compare",
-        action="store", nargs='?')
+    parser.add_argument("sigdatafile1",
+            help="First signature file to compare (or signature file to dump, if second not specified). Not used when using -t/--task.",
+            action="store", nargs='?')
 
+    parser.add_argument("sigdatafile2",
+            help="Second signature file to compare",
+            action="store", nargs='?')
 
 options = parser.parse_args()
+if is_dump:
+    options.color = 'never'
+    options.dump = True
+    options.sigdatafile2 = None
+    options.sigargs = None
 
 if options.debug:
     logger.setLevel(logging.DEBUG)
@@ -155,17 +155,32 @@ color = (options.color == 'always' or (options.color == 'auto' and sys.stdout.is
 if options.taskargs:
     with bb.tinfoil.Tinfoil() as tinfoil:
         tinfoil.prepare(config_only=True)
-        if options.sigargs:
-            find_compare_task(tinfoil, options.taskargs[0], options.taskargs[1], options.sigargs[0], options.sigargs[1], color=color)
+        if not options.dump and options.sigargs:
+            files = find_siginfo_task(tinfoil, options.taskargs[0], options.taskargs[1], options.sigargs[0], options.sigargs[1])
+        else:
+            files = find_siginfo_task(tinfoil, options.taskargs[0], options.taskargs[1])
+
+        if options.dump:
+            logger.debug("Signature file: %s" % files[-1])
+            output = bb.siggen.dump_sigfile(files[-1])
         else:
-            find_compare_task(tinfoil, options.taskargs[0], options.taskargs[1], color=color)
+            if len(files) < 2:
+                logger.error('Only one matching sigdata file found for the specified task (%s %s)' % (options.taskargs[0], options.taskargs[1]))
+                sys.exit(1)
+
+            # Recurse into signature comparison
+            logger.debug("Signature file (previous): %s" % files[-2])
+            logger.debug("Signature file (latest): %s" % files[-1])
+            output = bb.siggen.compare_sigfiles(files[-2], files[-1], recursecb, color=color)
 else:
     if options.sigargs:
         logger.error('-s/--signature can only be used together with -t/--task')
         sys.exit(1)
     try:
-        if options.sigdatafile1 and options.sigdatafile2:
-            output = bb.siggen.compare_sigfiles(options.sigdatafile1, options.sigdatafile2, color=color)
+        if not options.dump and options.sigdatafile1 and options.sigdatafile2:
+            with bb.tinfoil.Tinfoil() as tinfoil:
+                tinfoil.prepare(config_only=True)
+                output = bb.siggen.compare_sigfiles(options.sigdatafile1, options.sigdatafile2, recursecb, color=color)
         elif options.sigdatafile1:
             output = bb.siggen.dump_sigfile(options.sigdatafile1)
         else:
@@ -179,5 +194,5 @@ else:
         logger.error('Invalid signature data - ensure you are specifying sigdata/siginfo files')
         sys.exit(1)
 
-    if output:
-        print('\n'.join(output))
+if output:
+    print('\n'.join(output))

+ 0 - 94
bitbake/bin/bitbake-dumpsig

@@ -1,94 +0,0 @@
-#!/usr/bin/env python3
-
-# bitbake-dumpsig
-# BitBake task signature dump utility
-#
-# Copyright (C) 2013 Intel Corporation
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import os
-import sys
-import warnings
-import optparse
-import logging
-import pickle
-
-sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
-
-import bb.tinfoil
-import bb.siggen
-import bb.msg
-
-logger = bb.msg.logger_create('bitbake-dumpsig')
-
-def find_siginfo_task(bbhandler, pn, taskname):
-    """ Find the most recent signature file for the specified PN/task """
-
-    if not hasattr(bb.siggen, 'find_siginfo'):
-        logger.error('Metadata does not support finding signature data files')
-        sys.exit(1)
-
-    if not taskname.startswith('do_'):
-        taskname = 'do_%s' % taskname
-
-    filedates = bb.siggen.find_siginfo(pn, taskname, None, bbhandler.config_data)
-    latestfiles = sorted(filedates.keys(), key=lambda f: filedates[f])[-1:]
-    if not latestfiles:
-        logger.error('No sigdata files found matching %s %s' % (pn, taskname))
-        sys.exit(1)
-
-    return latestfiles[0]
-
-parser = optparse.OptionParser(
-    description = "Dumps siginfo/sigdata files written out by BitBake",
-    usage = """
-  %prog -t recipename taskname
-  %prog sigdatafile""")
-
-parser.add_option("-D", "--debug",
-        help = "enable debug",
-        action = "store_true", dest="debug", default = False)
-
-parser.add_option("-t", "--task",
-        help = "find the signature data file for the specified task",
-        action="store", dest="taskargs", nargs=2, metavar='recipename taskname')
-
-options, args = parser.parse_args(sys.argv)
-
-if options.debug:
-    logger.setLevel(logging.DEBUG)
-
-if options.taskargs:
-    tinfoil = bb.tinfoil.Tinfoil()
-    tinfoil.prepare(config_only = True)
-    file = find_siginfo_task(tinfoil, options.taskargs[0], options.taskargs[1])
-    logger.debug("Signature file: %s" % file)
-elif len(args) == 1:
-    parser.print_help()
-    sys.exit(0)
-else:
-    file = args[1]
-
-try:
-    output = bb.siggen.dump_sigfile(file)
-except IOError as e:
-    logger.error(str(e))
-    sys.exit(1)
-except (pickle.UnpicklingError, EOFError):
-    logger.error('Invalid signature data - ensure you are specifying a sigdata/siginfo file')
-    sys.exit(1)
-
-if output:
-    print('\n'.join(output))

+ 1 - 0
bitbake/bin/bitbake-dumpsig

@@ -0,0 +1 @@
+bitbake-diffsigs

+ 170 - 0
bitbake/bin/bitbake-hashclient

@@ -0,0 +1,170 @@
+#! /usr/bin/env python3
+#
+# Copyright (C) 2019 Garmin Ltd.
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import argparse
+import hashlib
+import logging
+import os
+import pprint
+import sys
+import threading
+import time
+
+try:
+    import tqdm
+    ProgressBar = tqdm.tqdm
+except ImportError:
+    class ProgressBar(object):
+        def __init__(self, *args, **kwargs):
+            pass
+
+        def __enter__(self):
+            return self
+
+        def __exit__(self, *args, **kwargs):
+            pass
+
+        def update(self):
+            pass
+
+sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), 'lib'))
+
+import hashserv
+
+DEFAULT_ADDRESS = 'unix://./hashserve.sock'
+METHOD = 'stress.test.method'
+
+
+def main():
+    def handle_stats(args, client):
+        if args.reset:
+            s = client.reset_stats()
+        else:
+            s = client.get_stats()
+        pprint.pprint(s)
+        return 0
+
+    def handle_stress(args, client):
+        def thread_main(pbar, lock):
+            nonlocal found_hashes
+            nonlocal missed_hashes
+            nonlocal max_time
+
+            client = hashserv.create_client(args.address)
+
+            for i in range(args.requests):
+                taskhash = hashlib.sha256()
+                taskhash.update(args.taskhash_seed.encode('utf-8'))
+                taskhash.update(str(i).encode('utf-8'))
+
+                start_time = time.perf_counter()
+                l = client.get_unihash(METHOD, taskhash.hexdigest())
+                elapsed = time.perf_counter() - start_time
+
+                with lock:
+                    if l:
+                        found_hashes += 1
+                    else:
+                        missed_hashes += 1
+
+                    max_time = max(elapsed, max_time)
+                    pbar.update()
+
+        max_time = 0
+        found_hashes = 0
+        missed_hashes = 0
+        lock = threading.Lock()
+        total_requests = args.clients * args.requests
+        start_time = time.perf_counter()
+        with ProgressBar(total=total_requests) as pbar:
+            threads = [threading.Thread(target=thread_main, args=(pbar, lock), daemon=False) for _ in range(args.clients)]
+            for t in threads:
+                t.start()
+
+            for t in threads:
+                t.join()
+
+        elapsed = time.perf_counter() - start_time
+        with lock:
+            print("%d requests in %.1fs. %.1f requests per second" % (total_requests, elapsed, total_requests / elapsed))
+            print("Average request time %.8fs" % (elapsed / total_requests))
+            print("Max request time was %.8fs" % max_time)
+            print("Found %d hashes, missed %d" % (found_hashes, missed_hashes))
+
+        if args.report:
+            with ProgressBar(total=args.requests) as pbar:
+                for i in range(args.requests):
+                    taskhash = hashlib.sha256()
+                    taskhash.update(args.taskhash_seed.encode('utf-8'))
+                    taskhash.update(str(i).encode('utf-8'))
+
+                    outhash = hashlib.sha256()
+                    outhash.update(args.outhash_seed.encode('utf-8'))
+                    outhash.update(str(i).encode('utf-8'))
+
+                    client.report_unihash(taskhash.hexdigest(), METHOD, outhash.hexdigest(), taskhash.hexdigest())
+
+                    with lock:
+                        pbar.update()
+
+    parser = argparse.ArgumentParser(description='Hash Equivalence Client')
+    parser.add_argument('--address', default=DEFAULT_ADDRESS, help='Server address (default "%(default)s")')
+    parser.add_argument('--log', default='WARNING', help='Set logging level')
+
+    subparsers = parser.add_subparsers()
+
+    stats_parser = subparsers.add_parser('stats', help='Show server stats')
+    stats_parser.add_argument('--reset', action='store_true',
+                              help='Reset server stats')
+    stats_parser.set_defaults(func=handle_stats)
+
+    stress_parser = subparsers.add_parser('stress', help='Run stress test')
+    stress_parser.add_argument('--clients', type=int, default=10,
+                               help='Number of simultaneous clients')
+    stress_parser.add_argument('--requests', type=int, default=1000,
+                               help='Number of requests each client will perform')
+    stress_parser.add_argument('--report', action='store_true',
+                               help='Report new hashes')
+    stress_parser.add_argument('--taskhash-seed', default='',
+                               help='Include string in taskhash')
+    stress_parser.add_argument('--outhash-seed', default='',
+                               help='Include string in outhash')
+    stress_parser.set_defaults(func=handle_stress)
+
+    args = parser.parse_args()
+
+    logger = logging.getLogger('hashserv')
+
+    level = getattr(logging, args.log.upper(), None)
+    if not isinstance(level, int):
+        raise ValueError('Invalid log level: %s' % args.log)
+
+    logger.setLevel(level)
+    console = logging.StreamHandler()
+    console.setLevel(level)
+    logger.addHandler(console)
+
+    func = getattr(args, 'func', None)
+    if func:
+        client = hashserv.create_client(args.address)
+        # Try to establish a connection to the server now to detect failures
+        # early
+        client.connect()
+
+        return func(args, client)
+
+    return 0
+
+
+if __name__ == '__main__':
+    try:
+        ret = main()
+    except Exception:
+        ret = 1
+        import traceback
+        traceback.print_exc()
+    sys.exit(ret)

+ 62 - 0
bitbake/bin/bitbake-hashserv

@@ -0,0 +1,62 @@
+#! /usr/bin/env python3
+#
+# Copyright (C) 2018 Garmin Ltd.
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import os
+import sys
+import logging
+import argparse
+import sqlite3
+
+sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), 'lib'))
+
+import hashserv
+
+VERSION = "1.0.0"
+
+DEFAULT_BIND = 'unix://./hashserve.sock'
+
+
+def main():
+    parser = argparse.ArgumentParser(description='Hash Equivalence Reference Server. Version=%s' % VERSION,
+                                     epilog='''The bind address is the path to a unix domain socket if it is
+                                               prefixed with "unix://". Otherwise, it is an IP address
+                                               and port in form ADDRESS:PORT. To bind to all addresses, leave
+                                               the ADDRESS empty, e.g. "--bind :8686". To bind to a specific
+                                               IPv6 address, enclose the address in "[]", e.g.
+                                               "--bind [::1]:8686"'''
+                                     )
+
+    parser.add_argument('--bind', default=DEFAULT_BIND, help='Bind address (default "%(default)s")')
+    parser.add_argument('--database', default='./hashserv.db', help='Database file (default "%(default)s")')
+    parser.add_argument('--log', default='WARNING', help='Set logging level')
+
+    args = parser.parse_args()
+
+    logger = logging.getLogger('hashserv')
+
+    level = getattr(logging, args.log.upper(), None)
+    if not isinstance(level, int):
+        raise ValueError('Invalid log level: %s' % args.log)
+
+    logger.setLevel(level)
+    console = logging.StreamHandler()
+    console.setLevel(level)
+    logger.addHandler(console)
+
+    server = hashserv.create_server(args.bind, args.database)
+    server.serve_forever()
+    return 0
+
+
+if __name__ == '__main__':
+    try:
+        ret = main()
+    except Exception:
+        ret = 1
+        import traceback
+        traceback.print_exc()
+    sys.exit(ret)

+ 1 - 11
bitbake/bin/bitbake-layers

@@ -7,18 +7,8 @@
 # Copyright (C) 2011 Mentor Graphics Corporation
 # Copyright (C) 2011-2015 Intel Corporation
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 import logging
 import os

+ 4 - 0
bitbake/bin/bitbake-prserv

@@ -1,4 +1,8 @@
 #!/usr/bin/env python3
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
 import os
 import sys,logging
 import optparse

+ 5 - 11
bitbake/bin/bitbake-selftest

@@ -2,18 +2,8 @@
 #
 # Copyright (C) 2012 Richard Purdie
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 import os
 import sys, logging
@@ -22,6 +12,7 @@ sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), 'lib
 import unittest
 try:
     import bb
+    import hashserv
     import layerindexlib
 except RuntimeError as exc:
     sys.exit(str(exc))
@@ -33,7 +24,10 @@ tests = ["bb.tests.codeparser",
          "bb.tests.event",
          "bb.tests.fetch",
          "bb.tests.parse",
+         "bb.tests.persist_data",
+         "bb.tests.runqueue",
          "bb.tests.utils",
+         "hashserv.tests",
          "layerindexlib.tests.layerindexobj",
          "layerindexlib.tests.restapi",
          "layerindexlib.tests.cooker"]

+ 15 - 4
bitbake/bin/bitbake-worker

@@ -1,4 +1,7 @@
 #!/usr/bin/env python3
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
 
 import os
 import sys
@@ -136,7 +139,7 @@ def sigterm_handler(signum, frame):
     os.killpg(0, signal.SIGTERM)
     sys.exit()
 
-def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, appends, taskdepdata, extraconfigdata, quieterrors=False, dry_run_exec=False):
+def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskhash, unihash, appends, taskdepdata, extraconfigdata, quieterrors=False, dry_run_exec=False):
     # We need to setup the environment BEFORE the fork, since
     # a fork() or exec*() activates PSEUDO...
 
@@ -231,10 +234,13 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, append
                     the_data.setVar(varname, value)
 
                 bb.parse.siggen.set_taskdata(workerdata["sigdata"])
+                if "newhashes" in workerdata:
+                    bb.parse.siggen.set_taskhashes(workerdata["newhashes"])
                 ret = 0
 
                 the_data = bb_cache.loadDataFull(fn, appends)
-                the_data.setVar('BB_TASKHASH', workerdata["runq_hash"][task])
+                the_data.setVar('BB_TASKHASH', taskhash)
+                the_data.setVar('BB_UNIHASH', unihash)
 
                 bb.utils.set_process_name("%s:%s" % (the_data.getVar("PN"), taskname.replace("do_", "")))
 
@@ -373,6 +379,7 @@ class BitbakeWorker(object):
                 self.handle_item(b"cookerconfig", self.handle_cookercfg)
                 self.handle_item(b"extraconfigdata", self.handle_extraconfigdata)
                 self.handle_item(b"workerdata", self.handle_workerdata)
+                self.handle_item(b"newtaskhashes", self.handle_newtaskhashes)
                 self.handle_item(b"runtask", self.handle_runtask)
                 self.handle_item(b"finishnow", self.handle_finishnow)
                 self.handle_item(b"ping", self.handle_ping)
@@ -411,6 +418,10 @@ class BitbakeWorker(object):
         bb.msg.loggerDefaultDomains = self.workerdata["logdefaultdomain"]
         for mc in self.databuilder.mcdata:
             self.databuilder.mcdata[mc].setVar("PRSERV_HOST", self.workerdata["prhost"])
+            self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.workerdata["hashservaddr"])
+
+    def handle_newtaskhashes(self, data):
+        self.workerdata["newhashes"] = pickle.loads(data)
 
     def handle_ping(self, _):
         workerlog_write("Handling ping\n")
@@ -425,10 +436,10 @@ class BitbakeWorker(object):
         sys.exit(0)
 
     def handle_runtask(self, data):
-        fn, task, taskname, quieterrors, appends, taskdepdata, dry_run_exec = pickle.loads(data)
+        fn, task, taskname, taskhash, unihash, quieterrors, appends, taskdepdata, dry_run_exec = pickle.loads(data)
         workerlog_write("Handling runtask %s %s %s\n" % (task, fn, taskname))
 
-        pid, pipein, pipeout = fork_off_task(self.cookercfg, self.data, self.databuilder, self.workerdata, fn, task, taskname, appends, taskdepdata, self.extraconfigdata, quieterrors, dry_run_exec)
+        pid, pipein, pipeout = fork_off_task(self.cookercfg, self.data, self.databuilder, self.workerdata, fn, task, taskname, taskhash, unihash, appends, taskdepdata, self.extraconfigdata, quieterrors, dry_run_exec)
 
         self.build_pids[pid] = task
         self.build_pipes[pid] = runQueueWorkerPipe(pipein, pipeout)

+ 1 - 13
bitbake/bin/bitdoc

@@ -1,21 +1,9 @@
 #!/usr/bin/env python3
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 #
 # Copyright (C) 2005 Holger Hans Peter Freyther
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 import optparse, os, sys
 

+ 4 - 0
bitbake/bin/git-make-shallow

@@ -1,4 +1,8 @@
 #!/usr/bin/env python3
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
 """git-make-shallow: make the current git repository shallow
 
 Remove the history of the specified revisions, then optionally filter the

+ 1 - 11
bitbake/bin/toaster

@@ -3,19 +3,9 @@
 # toaster - shell script to start Toaster
 
 # Copyright (C) 2013-2015 Intel Corp.
-
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
+# SPDX-License-Identifier: GPL-2.0-or-later
 #
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see http://www.gnu.org/licenses/.
 
 HELP="
 Usage: source toaster start|stop [webport=<address:port>] [noweb] [nobuild] [toasterdir]

+ 2 - 15
bitbake/bin/toaster-eventreplay

@@ -1,25 +1,12 @@
 #!/usr/bin/env python3
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 #
 # Copyright (C) 2014        Alex Damian
 #
+# SPDX-License-Identifier: GPL-2.0-only
+#
 # This file re-uses code spread throughout other Bitbake source files.
 # As such, all other copyrights belong to their own right holders.
 #
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 """
 This command takes a filename as a single parameter. The filename is read

+ 1 - 1
bitbake/classes/base.bbclass

@@ -44,7 +44,7 @@ python do_showdata() {
 	# emit the metadata which isnt valid shell
 	for e in bb.data.keys(d):
 		if d.getVarFlag(e, 'python', False):
-			bb.plain("\npython %s () {\n%s}" % (e, d.getVar(e, True)))
+			bb.plain("\npython %s () {\n%s}" % (e, d.getVar(e)))
 }
 
 addtask listtasks

+ 0 - 2
bitbake/contrib/dump_cache.py

@@ -1,6 +1,4 @@
 #!/usr/bin/env python3
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 #
 # Copyright (C) 2012, 2018 Wind River Systems, Inc.
 #

+ 39 - 39
bitbake/doc/bitbake-user-manual/bitbake-user-manual-execution.xml

@@ -31,7 +31,7 @@
             <para>
                 Prior to executing BitBake, you should take advantage of available
                 parallel thread execution on your build host by setting the
-                <link linkend='var-BB_NUMBER_THREADS'><filename>BB_NUMBER_THREADS</filename></link>
+                <link linkend='var-bb-BB_NUMBER_THREADS'><filename>BB_NUMBER_THREADS</filename></link>
                 variable in your project's <filename>local.conf</filename>
                 configuration file.
             </para>
@@ -87,9 +87,9 @@
         <para>
             The <filename>layer.conf</filename> files are used to
             construct key variables such as
-            <link linkend='var-BBPATH'><filename>BBPATH</filename></link>
+            <link linkend='var-bb-BBPATH'><filename>BBPATH</filename></link>
             and
-            <link linkend='var-BBFILES'><filename>BBFILES</filename></link>.
+            <link linkend='var-bb-BBFILES'><filename>BBFILES</filename></link>.
             <filename>BBPATH</filename> is used to search for
             configuration and class files under the
             <filename>conf</filename> and <filename>classes</filename>
@@ -117,19 +117,19 @@
             at certain variables, including:
             <itemizedlist>
                 <listitem><para>
-                    <link linkend='var-BB_ENV_WHITELIST'><filename>BB_ENV_WHITELIST</filename></link>
+                    <link linkend='var-bb-BB_ENV_WHITELIST'><filename>BB_ENV_WHITELIST</filename></link>
                     </para></listitem>
                 <listitem><para>
-                    <link linkend='var-BB_ENV_EXTRAWHITE'><filename>BB_ENV_EXTRAWHITE</filename></link>
+                    <link linkend='var-bb-BB_ENV_EXTRAWHITE'><filename>BB_ENV_EXTRAWHITE</filename></link>
                     </para></listitem>
                 <listitem><para>
-                    <link linkend='var-BB_PRESERVE_ENV'><filename>BB_PRESERVE_ENV</filename></link>
+                    <link linkend='var-bb-BB_PRESERVE_ENV'><filename>BB_PRESERVE_ENV</filename></link>
                     </para></listitem>
                 <listitem><para>
-                    <link linkend='var-BB_ORIGENV'><filename>BB_ORIGENV</filename></link>
+                    <link linkend='var-bb-BB_ORIGENV'><filename>BB_ORIGENV</filename></link>
                     </para></listitem>
                 <listitem><para>
-                    <link linkend='var-BITBAKE_UI'><filename>BITBAKE_UI</filename></link>
+                    <link linkend='var-bb-BITBAKE_UI'><filename>BITBAKE_UI</filename></link>
                     </para></listitem>
             </itemizedlist>
             The first four variables in this list relate to how BitBake treats shell
@@ -156,7 +156,7 @@
             BitBake first searches the current working directory for an
             optional <filename>conf/bblayers.conf</filename> configuration file.
             This file is expected to contain a
-            <link linkend='var-BBLAYERS'><filename>BBLAYERS</filename></link>
+            <link linkend='var-bb-BBLAYERS'><filename>BBLAYERS</filename></link>
             variable that is a space-delimited list of 'layer' directories.
             Recall that if BitBake cannot find a <filename>bblayers.conf</filename>
             file, then it is assumed the user has set the <filename>BBPATH</filename>
@@ -166,10 +166,10 @@
         <para>
             For each directory (layer) in this list, a <filename>conf/layer.conf</filename>
             file is located and parsed with the
-            <link linkend='var-LAYERDIR'><filename>LAYERDIR</filename></link>
+            <link linkend='var-bb-LAYERDIR'><filename>LAYERDIR</filename></link>
             variable being set to the directory where the layer was found.
             The idea is these files automatically set up
-            <link linkend='var-BBPATH'><filename>BBPATH</filename></link>
+            <link linkend='var-bb-BBPATH'><filename>BBPATH</filename></link>
             and other variables correctly for a given build directory.
         </para>
 
@@ -189,7 +189,7 @@
             depending on the environment variables previously
             mentioned or set in the configuration files.
             The
-            "<link linkend='ref-variables-glos'>Variables Glossary</link>"
+            "<link linkend='ref-bb-variables-glos'>Variables Glossary</link>"
             chapter presents a full list of variables.
         </para>
 
@@ -204,7 +204,7 @@
         <para>
             The <filename>base.bbclass</filename> file is always included.
             Other classes that are specified in the configuration using the
-            <link linkend='var-INHERIT'><filename>INHERIT</filename></link>
+            <link linkend='var-bb-INHERIT'><filename>INHERIT</filename></link>
             variable are also included.
             BitBake searches for class files in a
             <filename>classes</filename> subdirectory under
@@ -270,7 +270,7 @@
 
         <para>
             During the configuration phase, BitBake will have set
-            <link linkend='var-BBFILES'><filename>BBFILES</filename></link>.
+            <link linkend='var-bb-BBFILES'><filename>BBFILES</filename></link>.
             BitBake now uses it to construct a list of recipes to parse,
             along with any append files (<filename>.bbappend</filename>)
             to apply.
@@ -292,7 +292,7 @@
             Any inherit statements cause BitBake to find and
             then parse class files (<filename>.bbclass</filename>)
             using
-            <link linkend='var-BBPATH'><filename>BBPATH</filename></link>
+            <link linkend='var-bb-BBPATH'><filename>BBPATH</filename></link>
             as the search path.
             Finally, BitBake parses in order any append files found in
             <filename>BBFILES</filename>.
@@ -303,8 +303,8 @@
             pieces of metadata.
             For example, in <filename>bitbake.conf</filename> the recipe
             name and version are used to set the variables
-            <link linkend='var-PN'><filename>PN</filename></link> and
-            <link linkend='var-PV'><filename>PV</filename></link>:
+            <link linkend='var-bb-PN'><filename>PN</filename></link> and
+            <link linkend='var-bb-PV'><filename>PV</filename></link>:
             <literallayout class='monospaced'>
      PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}"
      PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[1] or '1.0'}"
@@ -336,7 +336,7 @@
             recipe information.
             The validity of this cache is determined by first computing a
             checksum of the base configuration data (see
-            <link linkend='var-BB_HASHCONFIG_WHITELIST'><filename>BB_HASHCONFIG_WHITELIST</filename></link>)
+            <link linkend='var-bb-BB_HASHCONFIG_WHITELIST'><filename>BB_HASHCONFIG_WHITELIST</filename></link>)
             and then checking if the checksum matches.
             If that checksum matches what is in the cache and the recipe
             and class files have not changed, Bitbake is able to use
@@ -384,9 +384,9 @@
             the recipe can be known.
             Each recipe's <filename>PROVIDES</filename> list is created
             implicitly through the recipe's
-            <link linkend='var-PN'><filename>PN</filename></link> variable
+            <link linkend='var-bb-PN'><filename>PN</filename></link> variable
             and explicitly through the recipe's
-            <link linkend='var-PROVIDES'><filename>PROVIDES</filename></link>
+            <link linkend='var-bb-PROVIDES'><filename>PROVIDES</filename></link>
             variable, which is optional.
         </para>
 
@@ -427,7 +427,7 @@
      PREFERRED_PROVIDER_virtual/kernel = "linux-yocto"
             </literallayout>
             The default
-            <link linkend='var-PREFERRED_PROVIDER'><filename>PREFERRED_PROVIDER</filename></link>
+            <link linkend='var-bb-PREFERRED_PROVIDER'><filename>PREFERRED_PROVIDER</filename></link>
             is the provider with the same name as the target.
             Bitbake iterates through each target it needs to build and
             resolves them and their dependencies using this process.
@@ -439,10 +439,10 @@
             BitBake defaults to the highest version of a provider.
             Version comparisons are made using the same method as Debian.
             You can use the
-            <link linkend='var-PREFERRED_VERSION'><filename>PREFERRED_VERSION</filename></link>
+            <link linkend='var-bb-PREFERRED_VERSION'><filename>PREFERRED_VERSION</filename></link>
             variable to specify a particular version.
             You can influence the order by using the
-            <link linkend='var-DEFAULT_PREFERENCE'><filename>DEFAULT_PREFERENCE</filename></link>
+            <link linkend='var-bb-DEFAULT_PREFERENCE'><filename>DEFAULT_PREFERENCE</filename></link>
             variable.
         </para>
 
@@ -464,7 +464,7 @@
             BitBake defaults to selecting the most recent
             version, unless otherwise specified.
             If the recipe in question has a
-            <link linkend='var-DEFAULT_PREFERENCE'><filename>DEFAULT_PREFERENCE</filename></link>
+            <link linkend='var-bb-DEFAULT_PREFERENCE'><filename>DEFAULT_PREFERENCE</filename></link>
             set lower than the other recipes (default is 0), then
             it will not be selected.
             This allows the person or persons maintaining
@@ -475,9 +475,9 @@
 
         <para>
             If the first recipe is named <filename>a_1.1.bb</filename>, then the
-            <link linkend='var-PN'><filename>PN</filename></link> variable
+            <link linkend='var-bb-PN'><filename>PN</filename></link> variable
             will be set to “a”, and the
-            <link linkend='var-PV'><filename>PV</filename></link>
+            <link linkend='var-bb-PV'><filename>PV</filename></link>
             variable will be set to 1.1.
         </para>
 
@@ -532,11 +532,11 @@
         <para>
             Dependencies are defined through several variables.
             You can find information about variables BitBake uses in
-            the <link linkend='ref-variables-glos'>Variables Glossary</link>
+            the <link linkend='ref-bb-variables-glos'>Variables Glossary</link>
             near the end of this manual.
             At a basic level, it is sufficient to know that BitBake uses the
-            <link linkend='var-DEPENDS'><filename>DEPENDS</filename></link> and
-            <link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link> variables when
+            <link linkend='var-bb-DEPENDS'><filename>DEPENDS</filename></link> and
+            <link linkend='var-bb-RDEPENDS'><filename>RDEPENDS</filename></link> variables when
             calculating dependencies.
         </para>
 
@@ -560,7 +560,7 @@
 
         <para>
             The build now starts with BitBake forking off threads up to the limit set in the
-            <link linkend='var-BB_NUMBER_THREADS'><filename>BB_NUMBER_THREADS</filename></link>
+            <link linkend='var-bb-BB_NUMBER_THREADS'><filename>BB_NUMBER_THREADS</filename></link>
             variable.
             BitBake continues to fork threads as long as there are tasks ready to run,
             those tasks have all their dependencies met, and the thread threshold has not been
@@ -574,7 +574,7 @@
 
         <para>
             As each task completes, a timestamp is written to the directory specified by the
-            <link linkend='var-STAMP'><filename>STAMP</filename></link> variable.
+            <link linkend='var-bb-STAMP'><filename>STAMP</filename></link> variable.
             On subsequent runs, BitBake looks in the build directory within
             <filename>tmp/stamps</filename> and does not rerun
             tasks that are already completed unless a timestamp is found to be invalid.
@@ -618,7 +618,7 @@
         <para>
             Tasks can be either a shell task or a Python task.
             For shell tasks, BitBake writes a shell script to
-            <filename>${</filename><link linkend='var-T'><filename>T</filename></link><filename>}/run.do_taskname.pid</filename>
+            <filename>${</filename><link linkend='var-bb-T'><filename>T</filename></link><filename>}/run.do_taskname.pid</filename>
             and then executes the script.
             The generated shell script contains all the exported variables,
             and the shell functions with all variables expanded.
@@ -645,10 +645,10 @@
             behavior:
             <itemizedlist>
                 <listitem><para>
-                    <link linkend='var-BB_SCHEDULER'><filename>BB_SCHEDULER</filename></link>
+                    <link linkend='var-bb-BB_SCHEDULER'><filename>BB_SCHEDULER</filename></link>
                     </para></listitem>
                 <listitem><para>
-                    <link linkend='var-BB_SCHEDULERS'><filename>BB_SCHEDULERS</filename></link>
+                    <link linkend='var-bb-BB_SCHEDULERS'><filename>BB_SCHEDULERS</filename></link>
                     </para></listitem>
             </itemizedlist>
             It is possible to have functions run before and after a task's main
@@ -684,7 +684,7 @@
             The simplistic approach for excluding the working directory is to set
             it to some fixed value and create the checksum for the "run" script.
             BitBake goes one step better and uses the
-            <link linkend='var-BB_HASHBASE_WHITELIST'><filename>BB_HASHBASE_WHITELIST</filename></link>
+            <link linkend='var-bb-BB_HASHBASE_WHITELIST'><filename>BB_HASHBASE_WHITELIST</filename></link>
             variable to define a list of variables that should never be included
             when generating the signatures.
         </para>
@@ -795,7 +795,7 @@
             This results in any metadata change that changes the task hash, automatically
             causing the task to be run again.
             This removes the need to bump
-            <link linkend='var-PR'><filename>PR</filename></link>
+            <link linkend='var-bb-PR'><filename>PR</filename></link>
             values, and changes to metadata automatically ripple across the build.
         </para>
 
@@ -884,7 +884,7 @@
 
         <para>
             BitBake first calls the function defined by the
-            <link linkend='var-BB_HASHCHECK_FUNCTION'><filename>BB_HASHCHECK_FUNCTION</filename></link>
+            <link linkend='var-bb-BB_HASHCHECK_FUNCTION'><filename>BB_HASHCHECK_FUNCTION</filename></link>
             variable with a list of tasks and corresponding
             hashes it wants to build.
             This function is designed to be fast and returns a list
@@ -908,7 +908,7 @@
             For example, it is pointless to obtain a compiler if you
             already have the compiled binary.
             To handle this, BitBake calls the
-            <link linkend='var-BB_SETSCENE_DEPVALID'><filename>BB_SETSCENE_DEPVALID</filename></link>
+            <link linkend='var-bb-BB_SETSCENE_DEPVALID'><filename>BB_SETSCENE_DEPVALID</filename></link>
             function for each successful setscene task to know whether or not it needs
             to obtain the dependencies of that task.
         </para>
@@ -916,7 +916,7 @@
         <para>
             Finally, after all the setscene tasks have executed, BitBake calls the
             function listed in
-            <link linkend='var-BB_SETSCENE_VERIFY_FUNCTION2'><filename>BB_SETSCENE_VERIFY_FUNCTION2</filename></link>
+            <link linkend='var-bb-BB_SETSCENE_VERIFY_FUNCTION2'><filename>BB_SETSCENE_VERIFY_FUNCTION2</filename></link>
             with the list of tasks BitBake thinks has been "covered".
             The metadata can then ensure that this list is correct and can
             inform BitBake that it wants specific tasks to be run regardless

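As a quick orientation for the variables whose link targets were renamed above, here is a minimal, hypothetical configuration sketch; the provider, recipe name, and version string are placeholders and are not taken from this commit:

     # conf/local.conf (illustrative)
     PREFERRED_PROVIDER_virtual/kernel = "linux-yocto"
     PREFERRED_VERSION_linux-yocto = "5.2.1"
     BB_NUMBER_THREADS = "8"

     # in a recipe (illustrative): mark it as non-default so another version wins
     DEFAULT_PREFERENCE = "-1"
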
+ 40 - 29
bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.xml

@@ -44,7 +44,7 @@
             </literallayout>
             This code sets up an instance of the fetch class.
             The instance uses a space-separated list of URLs from the
-            <link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>
+            <link linkend='var-bb-SRC_URI'><filename>SRC_URI</filename></link>
             variable and then calls the <filename>download</filename>
             method to download the files.
         </para>
@@ -78,7 +78,7 @@
                 <listitem><para><emphasis>Pre-mirror Sites:</emphasis>
                     BitBake first uses pre-mirrors to try and find source files.
                     These locations are defined using the
-                    <link linkend='var-PREMIRRORS'><filename>PREMIRRORS</filename></link>
+                    <link linkend='var-bb-PREMIRRORS'><filename>PREMIRRORS</filename></link>
                     variable.
                     </para></listitem>
                 <listitem><para><emphasis>Source URI:</emphasis>
@@ -88,7 +88,7 @@
                 <listitem><para><emphasis>Mirror Sites:</emphasis>
                     If fetch failures occur, BitBake next uses mirror locations as
                     defined by the
-                    <link linkend='var-MIRRORS'><filename>MIRRORS</filename></link>
+                    <link linkend='var-bb-MIRRORS'><filename>MIRRORS</filename></link>
                     variable.
                     </para></listitem>
             </itemizedlist>
@@ -144,7 +144,7 @@
             Any source files that are not local (i.e.
             downloaded from the Internet) are placed into the download
             directory, which is specified by the
-            <link linkend='var-DL_DIR'><filename>DL_DIR</filename></link>
+            <link linkend='var-bb-DL_DIR'><filename>DL_DIR</filename></link>
             variable.
         </para>
 
@@ -184,11 +184,11 @@
 
         <para>
             If
-            <link linkend='var-BB_STRICT_CHECKSUM'><filename>BB_STRICT_CHECKSUM</filename></link>
+            <link linkend='var-bb-BB_STRICT_CHECKSUM'><filename>BB_STRICT_CHECKSUM</filename></link>
             is set, any download without a checksum triggers an
             error message.
             The
-            <link linkend='var-BB_NO_NETWORK'><filename>BB_NO_NETWORK</filename></link>
+            <link linkend='var-bb-BB_NO_NETWORK'><filename>BB_NO_NETWORK</filename></link>
             variable can be used to make any attempted network access a fatal
             error, which is useful for checking that mirrors are complete
             as well as other things.
@@ -265,11 +265,11 @@
                 The filename you specify within the URL can be
                 either an absolute or relative path to a file.
                 If the filename is relative, the contents of the
-                <link linkend='var-FILESPATH'><filename>FILESPATH</filename></link>
+                <link linkend='var-bb-FILESPATH'><filename>FILESPATH</filename></link>
                 variable is used in the same way
                 <filename>PATH</filename> is used to find executables.
                 If the file cannot be found, it is assumed that it is available in
-                <link linkend='var-DL_DIR'><filename>DL_DIR</filename></link>
+                <link linkend='var-bb-DL_DIR'><filename>DL_DIR</filename></link>
                 by the time the <filename>download()</filename> method is called.
             </para>
 
@@ -304,7 +304,7 @@
                 allows the name of the downloaded file to be specified.
                 Specifying the name of the downloaded file is useful
                 for avoiding collisions in
-                <link linkend='var-DL_DIR'><filename>DL_DIR</filename></link>
+                <link linkend='var-bb-DL_DIR'><filename>DL_DIR</filename></link>
                 when dealing with multiple files that have the same name.
             </para>
 
@@ -355,7 +355,7 @@
                         A special value of "now" causes the checkout to
                         be updated on every build.
                         </para></listitem>
-                    <listitem><para><emphasis><link linkend='var-CVSDIR'><filename>CVSDIR</filename></link>:</emphasis>
+                    <listitem><para><emphasis><link linkend='var-bb-CVSDIR'><filename>CVSDIR</filename></link>:</emphasis>
                         Specifies where a temporary checkout is saved.
                         The location is often <filename>DL_DIR/cvs</filename>.
                         </para></listitem>
@@ -395,7 +395,7 @@
                     <listitem><para><emphasis>"date":</emphasis>
                         Specifies a date.
                         If no "date" is specified, the
-                        <link linkend='var-SRCDATE'><filename>SRCDATE</filename></link>
+                        <link linkend='var-bb-SRCDATE'><filename>SRCDATE</filename></link>
                         of the configuration is used to checkout a specific date.
                         The special value of "now" causes the checkout to be
                         updated on every build.
@@ -406,7 +406,7 @@
                         to which the module is unpacked.
                         You are forcing the module into a special
                         directory relative to
-                        <link linkend='var-CVSDIR'><filename>CVSDIR</filename></link>.
+                        <link linkend='var-bb-CVSDIR'><filename>CVSDIR</filename></link>.
                         </para></listitem>
                     <listitem><para><emphasis>"rsh"</emphasis>
                         Used in conjunction with the "method" parameter.
@@ -448,7 +448,7 @@
                 <filename>FETCHCMD_svn</filename>, which defaults
                 to "svn".
                 The fetcher's temporary working directory is set by
-                <link linkend='var-SVNDIR'><filename>SVNDIR</filename></link>,
+                <link linkend='var-bb-SVNDIR'><filename>SVNDIR</filename></link>,
                 which is usually <filename>DL_DIR/svn</filename>.
             </para>
 
@@ -509,7 +509,7 @@
                 source control system.
                 The fetcher works by creating a bare clone of the
                 remote into
-                <link linkend='var-GITDIR'><filename>GITDIR</filename></link>,
+                <link linkend='var-bb-GITDIR'><filename>GITDIR</filename></link>,
                 which is usually <filename>DL_DIR/git2</filename>.
                 This bare clone is then cloned into the work directory during the
                 unpack stage when a specific tree is checked out.
@@ -588,6 +588,14 @@
                         The name of the path in which to place the checkout.
                         By default, the path is <filename>git/</filename>.
                         </para></listitem>
+                    <listitem><para><emphasis>"usehead":</emphasis>
+                        Enables local <filename>git://</filename> URLs to use the
+                        current branch HEAD as the revision for use with
+                        <filename>AUTOREV</filename>.
+                        The "usehead" parameter implies no branch and only works
+                        when the transfer protocol is
+                        <filename>file://</filename>.
+                        </para></listitem>
                 </itemizedlist>
                 Here are some example URLs:
                 <literallayout class='monospaced'>
@@ -604,7 +612,7 @@
                 This fetcher submodule inherits from the
                 <link linkend='git-fetcher'>Git fetcher</link> and extends
                 that fetcher's behavior by fetching a repository's submodules.
-                <link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>
+                <link linkend='var-bb-SRC_URI'><filename>SRC_URI</filename></link>
                 is passed to the Git fetcher as described in the
                 "<link linkend='git-fetcher'>Git Fetcher (<filename>git://</filename>)</link>"
                 section.
@@ -639,9 +647,9 @@
 
             <para>
                 To use this fetcher, make sure your recipe has proper
-                <link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>,
-                <link linkend='var-SRCREV'><filename>SRCREV</filename></link>, and
-                <link linkend='var-PV'><filename>PV</filename></link> settings.
+                <link linkend='var-bb-SRC_URI'><filename>SRC_URI</filename></link>,
+                <link linkend='var-bb-SRCREV'><filename>SRCREV</filename></link>, and
+                <link linkend='var-bb-PV'><filename>PV</filename></link> settings.
                 Here is an example:
                 <literallayout class='monospaced'>
      SRC_URI = "ccrc://cc.example.org/ccrc;vob=/example_vob;module=/example_module"
@@ -726,15 +734,15 @@
                 <filename>FETCHCMD_p4</filename>, which defaults
                 to "p4".
                 The fetcher's temporary working directory is set by
-                <link linkend='var-P4DIR'><filename>P4DIR</filename></link>,
+                <link linkend='var-bb-P4DIR'><filename>P4DIR</filename></link>,
                 which defaults to "DL_DIR/p4".
             </para>
 
             <para>
                 To use this fetcher, make sure your recipe has proper
-                <link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>,
-                <link linkend='var-SRCREV'><filename>SRCREV</filename></link>, and
-                <link linkend='var-PV'><filename>PV</filename></link> values.
+                <link linkend='var-bb-SRC_URI'><filename>SRC_URI</filename></link>,
+                <link linkend='var-bb-SRCREV'><filename>SRCREV</filename></link>, and
+                <link linkend='var-bb-PV'><filename>PV</filename></link> values.
                 The p4 executable is able to use the config file defined by your
                 system's <filename>P4CONFIG</filename> environment variable in
                 order to define the Perforce server URL and port, username, and
@@ -785,9 +793,9 @@
                 <filename>google-repo</filename> source control system.
                 The fetcher works by initiating and syncing sources of the
                 repository into
-                <link linkend='var-REPODIR'><filename>REPODIR</filename></link>,
+                <link linkend='var-bb-REPODIR'><filename>REPODIR</filename></link>,
                 which is usually
-                <link linkend='var-DL_DIR'><filename>DL_DIR</filename></link><filename>/repo</filename>.
+                <link linkend='var-bb-DL_DIR'><filename>DL_DIR</filename></link><filename>/repo</filename>.
             </para>
 
             <para>
@@ -824,19 +832,22 @@
                         Bazaar (<filename>bzr://</filename>)
                         </para></listitem>
                     <listitem><para>
-                        Trees using Git Annex (<filename>gitannex://</filename>)
+                        Mercurial (<filename>hg://</filename>)
                         </para></listitem>
                     <listitem><para>
-                        Secure FTP (<filename>sftp://</filename>)
+                        npm (<filename>npm://</filename>)
                         </para></listitem>
                     <listitem><para>
-                        Secure Shell (<filename>ssh://</filename>)
+                        OSC (<filename>osc://</filename>)
                         </para></listitem>
                     <listitem><para>
-                        OSC (<filename>osc://</filename>)
+                        Secure FTP (<filename>sftp://</filename>)
                         </para></listitem>
                     <listitem><para>
-                        Mercurial (<filename>hg://</filename>)
+                        Secure Shell (<filename>ssh://</filename>)
+                        </para></listitem>
+                    <listitem><para>
+                        Trees using Git Annex (<filename>gitannex://</filename>)
                         </para></listitem>
                 </itemizedlist>
                 No documentation currently exists for these lesser used

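The new "usehead" parameter documented above applies only to local repositories fetched over the file:// protocol; a hedged recipe fragment exercising it could look as follows (the repository path is hypothetical):

     SRC_URI = "git:///srv/git/myproject;protocol=file;usehead=1"
     SRCREV = "${AUTOREV}"
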
+ 14 - 14
bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.xml

@@ -194,7 +194,7 @@
                 <para>
                 When you run BitBake, it begins looking for metadata files.
                 The
-                <link linkend='var-BBPATH'><filename>BBPATH</filename></link>
+                <link linkend='var-bb-BBPATH'><filename>BBPATH</filename></link>
                 variable is what tells BitBake where to look for those files.
                 <filename>BBPATH</filename> is not set and you need to set it.
                 Without <filename>BBPATH</filename>, Bitbake cannot
@@ -273,14 +273,14 @@
                 some editor to create the <filename>bitbake.conf</filename>
                 so that it contains the following:
                 <literallayout class='monospaced'>
-     <link linkend='var-PN'>PN</link>  = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}"
+     <link linkend='var-bb-PN'>PN</link>  = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}"
                 </literallayout>
                 <literallayout class='monospaced'>
-     TMPDIR  = "${<link linkend='var-TOPDIR'>TOPDIR</link>}/tmp"
-     <link linkend='var-CACHE'>CACHE</link>   = "${TMPDIR}/cache"
-     <link linkend='var-STAMP'>STAMP</link>   = "${TMPDIR}/${PN}/stamps"
-     <link linkend='var-T'>T</link>       = "${TMPDIR}/${PN}/work"
-     <link linkend='var-B'>B</link>       = "${TMPDIR}/${PN}"
+     TMPDIR  = "${<link linkend='var-bb-TOPDIR'>TOPDIR</link>}/tmp"
+     <link linkend='var-bb-CACHE'>CACHE</link>   = "${TMPDIR}/cache"
+     <link linkend='var-bb-STAMP'>STAMP</link>   = "${TMPDIR}/${PN}/stamps"
+     <link linkend='var-bb-T'>T</link>       = "${TMPDIR}/${PN}/work"
+     <link linkend='var-bb-B'>B</link>       = "${TMPDIR}/${PN}"
                 </literallayout>
                 <note>
                     Without a value for <filename>PN</filename>, the
@@ -402,12 +402,12 @@
                 Move to the <filename>conf</filename> directory and create a
                 <filename>layer.conf</filename> file that has the following:
                 <literallayout class='monospaced'>
-     BBPATH .= ":${<link linkend='var-LAYERDIR'>LAYERDIR</link>}"
+     BBPATH .= ":${<link linkend='var-bb-LAYERDIR'>LAYERDIR</link>}"
 
-     <link linkend='var-BBFILES'>BBFILES</link> += "${LAYERDIR}/*.bb"
+     <link linkend='var-bb-BBFILES'>BBFILES</link> += "${LAYERDIR}/*.bb"
 
-     <link linkend='var-BBFILE_COLLECTIONS'>BBFILE_COLLECTIONS</link> += "mylayer"
-     <link linkend='var-BBFILE_PATTERN'>BBFILE_PATTERN_mylayer</link> := "^${LAYERDIR_RE}/"
+     <link linkend='var-bb-BBFILE_COLLECTIONS'>BBFILE_COLLECTIONS</link> += "mylayer"
+     <link linkend='var-bb-BBFILE_PATTERN'>BBFILE_PATTERN_mylayer</link> := "^${LAYERDIR_RE}/"
                 </literallayout>
                 For information on these variables, click the links
                 to go to the definitions in the glossary.</para>
@@ -416,9 +416,9 @@
                 a recipe file named <filename>printhello.bb</filename> that
                 has the following:
                 <literallayout class='monospaced'>
-     <link linkend='var-DESCRIPTION'>DESCRIPTION</link> = "Prints Hello World"
-     <link linkend='var-PN'>PN</link> = 'printhello'
-     <link linkend='var-PV'>PV</link> = '1'
+     <link linkend='var-bb-DESCRIPTION'>DESCRIPTION</link> = "Prints Hello World"
+     <link linkend='var-bb-PN'>PN</link> = 'printhello'
+     <link linkend='var-bb-PV'>PV</link> = '1'
 
      python do_build() {
         bb.plain("********************");

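With the layer.conf and printhello.bb from the hunk above in place, the example is exercised roughly as follows; the project directory is hypothetical, and BBPATH must be set by hand as the text notes:

     $ cd ~/hello                  # hypothetical project directory holding conf/ and the layer
     $ export BBPATH="$(pwd)"
     $ bitbake printhello
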
+ 9 - 9
bitbake/doc/bitbake-user-manual/bitbake-user-manual-intro.xml

@@ -781,7 +781,7 @@
                     target, you must also enable BitBake to perform multiple
                     configuration builds.
                     Enabling is accomplished by setting the
-                    <link linkend='var-BBMULTICONFIG'><filename>BBMULTICONFIG</filename></link>
+                    <link linkend='var-bb-BBMULTICONFIG'><filename>BBMULTICONFIG</filename></link>
                     variable in the <filename>local.conf</filename>
                     configuration file.
                     As an example, suppose you had configuration files
@@ -791,7 +791,7 @@
                     The following statement in the
                     <filename>local.conf</filename> file both enables
                     BitBake to perform multiple configuration builds and
-                    specifies the two multiconfigs:
+                    specifies the two extra multiconfigs:
                     <literallayout class='monospaced'>
      BBMULTICONFIG = "target1 target2"
                     </literallayout>
@@ -803,13 +803,13 @@
                     builds, use the following command form to start the
                     builds:
                     <literallayout class='monospaced'>
-     $ bitbake [multiconfig:<replaceable>multiconfigname</replaceable>:]<replaceable>target</replaceable> [[[multiconfig:<replaceable>multiconfigname</replaceable>:]<replaceable>target</replaceable>] ... ]
+     $ bitbake [mc:<replaceable>multiconfigname</replaceable>:]<replaceable>target</replaceable> [[[mc:<replaceable>multiconfigname</replaceable>:]<replaceable>target</replaceable>] ... ]
                     </literallayout>
-                    Here is an example for two multiconfigs:
+                    Here is an example for two extra multiconfigs:
                     <filename>target1</filename> and
                     <filename>target2</filename>:
                     <literallayout class='monospaced'>
-     $ bitbake multiconfig:target1:<replaceable>target</replaceable> multiconfig:target2:<replaceable>target</replaceable>
+     $ bitbake mc::<replaceable>target</replaceable> mc:target1:<replaceable>target</replaceable> mc:target2:<replaceable>target</replaceable>
                     </literallayout>
                 </para>
             </section>
@@ -837,13 +837,13 @@
                     build, you must declare the dependencies in the recipe
                     using the following statement form:
                     <literallayout class='monospaced'>
-     <replaceable>task_or_package</replaceable>[mcdepends] = "multiconfig:<replaceable>from_multiconfig</replaceable>:<replaceable>to_multiconfig</replaceable>:<replaceable>recipe_name</replaceable>:<replaceable>task_on_which_to_depend</replaceable>"
+     <replaceable>task_or_package</replaceable>[mcdepends] = "mc:<replaceable>from_multiconfig</replaceable>:<replaceable>to_multiconfig</replaceable>:<replaceable>recipe_name</replaceable>:<replaceable>task_on_which_to_depend</replaceable>"
                     </literallayout>
                     To better show how to use this statement, consider an
                     example with two multiconfigs: <filename>target1</filename>
                     and <filename>target2</filename>:
                     <literallayout class='monospaced'>
-     <replaceable>image_task</replaceable>[mcdepends] = "multiconfig:target1:target2:<replaceable>image2</replaceable>:<replaceable>rootfs_task</replaceable>"
+     <replaceable>image_task</replaceable>[mcdepends] = "mc:target1:target2:<replaceable>image2</replaceable>:<replaceable>rootfs_task</replaceable>"
                     </literallayout>
                     In this example, the
                     <replaceable>from_multiconfig</replaceable> is "target1" and
@@ -859,7 +859,7 @@
                    Once you set up this dependency, you can build the
                    "target1" multiconfig using a BitBake command as follows:
                    <literallayout class='monospaced'>
-     $ bitbake multiconfig:target1:<replaceable>image1</replaceable>
+     $ bitbake mc:target1:<replaceable>image1</replaceable>
                    </literallayout>
                    This command executes all the tasks needed to create
                    <replaceable>image1</replaceable> for the "target1"
@@ -875,7 +875,7 @@
                    Consider this change to the statement in the
                    <replaceable>image1</replaceable> recipe:
                    <literallayout class='monospaced'>
-     <replaceable>image_task</replaceable>[mcdepends] = "multiconfig:target1:target2:<replaceable>image2</replaceable>:<replaceable>image_task</replaceable>"
+     <replaceable>image_task</replaceable>[mcdepends] = "mc:target1:target2:<replaceable>image2</replaceable>:<replaceable>image_task</replaceable>"
                    </literallayout>
                    In this case, BitBake must create
                    <replaceable>image2</replaceable> for the "target2"

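Putting this file's two pieces together, a hedged end-to-end sketch of the renamed syntax; the image name "myimage" is a placeholder:

     # conf/local.conf
     BBMULTICONFIG = "target1 target2"

     # build the default configuration plus both extra multiconfigs
     $ bitbake mc::myimage mc:target1:myimage mc:target2:myimage
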
+ 126 - 53
bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.xml

@@ -61,6 +61,78 @@
             </para>
         </section>
 
+        <section id='modifying-existing-variables'>
+            <title>Modifying Existing Variables</title>
+
+            <para>
+                Sometimes you need to modify existing variables.
+                Following are some cases where you might find you want to
+                modify an existing variable:
+                <itemizedlist>
+                    <listitem><para>
+                        Customize a recipe that uses the variable.
+                        </para></listitem>
+                    <listitem><para>
+                        Change a variable's default value used in a
+                        <filename>*.bbclass</filename> file.
+                        </para></listitem>
+                    <listitem><para>
+                        Change the variable in a <filename>*.bbappend</filename>
+                        file to override the variable in the original recipe.
+                        </para></listitem>
+                    <listitem><para>
+                        Change the variable in a configuration file so that the
+                        value overrides an existing configuration.
+                        </para></listitem>
+                </itemizedlist>
+            </para>
+
+            <para>
+                Changing a variable value can sometimes depend on how the
+                value was originally assigned and also on the desired
+                intent of the change.
+                In particular, when you append a value to a variable that
+                has a default value, the resulting value might not be what
+                you expect.
+                In this case, the value you provide might replace the value
+                rather than append to the default value.
+            </para>
+
+            <para>
+                If after you have changed a variable's value and something
+                unexplained occurs, you can use BitBake to check the actual
+                value of the suspect variable.
+                You can make these checks for both configuration and recipe
+                level changes:
+                <itemizedlist>
+                    <listitem><para>
+                        For configuration changes, use the following:
+                        <literallayout class='monospaced'>
+     $ bitbake -e
+                        </literallayout>
+                        This command displays variable values after the
+                        configuration files (i.e. <filename>local.conf</filename>,
+                        <filename>bblayers.conf</filename>,
+                        <filename>bitbake.conf</filename> and so forth) have
+                        been parsed.
+                        <note>
+                            Variables that are exported to the environment are
+                            preceded by the string "export" in the command's
+                            output.
+                        </note>
+                        </para></listitem>
+                    <listitem><para>
+                        For recipe changes, use the following:
+                        <literallayout class='monospaced'>
+     $ bitbake <replaceable>recipe</replaceable> -e | grep VARIABLE="
+                        </literallayout>
+                        This command checks to see if the variable actually
+                        makes it into a specific recipe.
+                        </para></listitem>
+                </itemizedlist>
+            </para>
+        </section>
+
         <section id='line-joining'>
             <title>Line Joining</title>
 
@@ -297,9 +369,8 @@
 
             <para>
                 These operators differ from the ":=", ".=", "=.", "+=", and "=+"
-                operators in that their effects are deferred
-                until after parsing completes rather than being immediately
-                applied.
+                operators in that their effects are applied at variable
+                expansion time rather than being immediately applied.
                 Here are some examples:
                 <literallayout class='monospaced'>
      B = "bval"
@@ -348,18 +419,22 @@
      FOO = "123 456 789 123456 123 456 123 456"
      FOO_remove = "123"
      FOO_remove = "456"
-     FOO2 = "abc def ghi abcdef abc def abc def"
-     FOO2_remove = "abc def"
+     FOO2 = " abc  def ghi abcdef abc def abc  def def"
+     FOO2_remove = " \
+         def \
+         abc \
+         ghi \
+     "
                 </literallayout>
                 The variable <filename>FOO</filename> becomes
-                "&nbsp;&nbsp;789 123456&nbsp;&nbsp;&nbsp;&nbsp;"
+                "&nbsp;&nbsp;789&nbsp;123456&nbsp;&nbsp;&nbsp;&nbsp;"
                 and <filename>FOO2</filename> becomes
-                "&nbsp;&nbsp;ghi abcdef&nbsp;&nbsp;&nbsp;&nbsp;".
+                "&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;jkl&nbsp;&nbsp;abcdef&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;".
             </para>
 
             <para>
                 Like "_append" and "_prepend", "_remove"
-                is deferred until after parsing completes.
+                is applied at variable expansion time.
             </para>
         </section>
 
@@ -503,7 +578,7 @@
         </section>
 
         <section id='unsetting-variables'>
-            <title>Unseting variables</title>
+            <title>Unsetting variables</title>
 
             <para>
                 It is possible to completely remove a variable or a variable flag
@@ -595,7 +670,7 @@
 
         <para>
             BitBake uses
-            <link linkend='var-OVERRIDES'><filename>OVERRIDES</filename></link>
+            <link linkend='var-bb-OVERRIDES'><filename>OVERRIDES</filename></link>
             to control what variables are overridden after BitBake
             parses recipes and configuration files.
             This section describes how you can use
@@ -705,7 +780,7 @@
 
                         <para>Internally, this is implemented by prepending
                         the task (e.g. "task-compile:") to the value of
-                        <link linkend='var-OVERRIDES'><filename>OVERRIDES</filename></link>
+                        <link linkend='var-bb-OVERRIDES'><filename>OVERRIDES</filename></link>
                         for the local datastore of the <filename>do_compile</filename>
                         task.</para>
 
@@ -724,17 +799,15 @@
             <title>Key Expansion</title>
 
             <para>
-                Key expansion happens when the BitBake datastore is finalized
-                just before BitBake expands overrides.
+                Key expansion happens when the BitBake datastore is finalized.
                 To better understand this, consider the following example:
                 <literallayout class='monospaced'>
      A${B} = "X"
      B = "2"
      A2 = "Y"
                 </literallayout>
-                In this case, after all the parsing is complete, and
-                before any overrides are handled, BitBake expands
-                <filename>${B}</filename> into "2".
+                In this case, after all the parsing is complete,
+                BitBake expands <filename>${B}</filename> into "2".
                 This expansion causes <filename>A2</filename>, which was
                 set to "Y" before the expansion, to become "X".
             </para>
@@ -868,7 +941,7 @@
 
             <para>
                 BitBake uses the
-                <link linkend='var-BBPATH'><filename>BBPATH</filename></link>
+                <link linkend='var-bb-BBPATH'><filename>BBPATH</filename></link>
                 variable to locate needed include and class files.
                 Additionally, BitBake searches the current directory for
                 <filename>include</filename> and <filename>require</filename>
@@ -1086,7 +1159,7 @@
             <para>
                 When creating a configuration file (<filename>.conf</filename>),
                 you can use the
-                <link linkend='var-INHERIT'><filename>INHERIT</filename></link>
+                <link linkend='var-bb-INHERIT'><filename>INHERIT</filename></link>
                 configuration directive to inherit a class.
                 BitBake only supports this directive when used within
                 a configuration file.
@@ -1370,7 +1443,7 @@
                         </para></listitem>
                     <listitem><para>
                         BitBake-style Python functions generate a separate
-                        <filename>${</filename><link linkend='var-T'><filename>T</filename></link><filename>}/run.</filename><replaceable>function-name</replaceable><filename>.</filename><replaceable>pid</replaceable>
+                        <filename>${</filename><link linkend='var-bb-T'><filename>T</filename></link><filename>}/run.</filename><replaceable>function-name</replaceable><filename>.</filename><replaceable>pid</replaceable>
                         script that is executed to run the function, and also
                         generate a log file in
                         <filename>${T}/log.</filename><replaceable>function-name</replaceable><filename>.</filename><replaceable>pid</replaceable>
@@ -1773,7 +1846,7 @@
                     things exported or listed in its whitelist to ensure that the build
                     environment is reproducible and consistent.
                     You can prevent this "cleaning" by setting the
-                    <link linkend='var-BB_PRESERVE_ENV'><filename>BB_PRESERVE_ENV</filename></link>
+                    <link linkend='var-bb-BB_PRESERVE_ENV'><filename>BB_PRESERVE_ENV</filename></link>
                     variable.
                 </note>
                 Consequently, if you do want something to get passed into the
@@ -1783,9 +1856,9 @@
                         Tell BitBake to load what you want from the environment
                         into the datastore.
                         You can do so through the
-                        <link linkend='var-BB_ENV_WHITELIST'><filename>BB_ENV_WHITELIST</filename></link>
+                        <link linkend='var-bb-BB_ENV_WHITELIST'><filename>BB_ENV_WHITELIST</filename></link>
                         and
-                        <link linkend='var-BB_ENV_EXTRAWHITE'><filename>BB_ENV_EXTRAWHITE</filename></link>
+                        <link linkend='var-bb-BB_ENV_EXTRAWHITE'><filename>BB_ENV_EXTRAWHITE</filename></link>
                         variables.
                         For example, assume you want to prevent the build system from
                         accessing your <filename>$HOME/.ccache</filename>
@@ -1824,7 +1897,7 @@
                 from the original execution environment.
                 Bitbake saves a copy of the original environment into
                 a special variable named
-                <link linkend='var-BB_ORIGENV'><filename>BB_ORIGENV</filename></link>.
+                <link linkend='var-bb-BB_ORIGENV'><filename>BB_ORIGENV</filename></link>.
             </para>
 
             <para>
@@ -1883,7 +1956,7 @@
                 <listitem><para><emphasis><filename>[depends]</filename>:</emphasis>
                     Controls inter-task dependencies.
                     See the
-                    <link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
+                    <link linkend='var-bb-DEPENDS'><filename>DEPENDS</filename></link>
                     variable and the
                     "<link linkend='inter-task-dependencies'>Inter-Task Dependencies</link>"
                     section for more information.
@@ -1891,7 +1964,7 @@
                 <listitem><para><emphasis><filename>[deptask]</filename>:</emphasis>
                     Controls task build-time dependencies.
                     See the
-                    <link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
+                    <link linkend='var-bb-DEPENDS'><filename>DEPENDS</filename></link>
                     variable and the
                     "<link linkend='build-dependencies'>Build Dependencies</link>"
                     section for more information.
@@ -1937,7 +2010,7 @@
                     of cores but certain tasks need to be rate-limited due to various
                     kinds of resource constraints (e.g. to avoid network throttling).
                     <filename>number_threads</filename> works similarly to the
-                    <link linkend='var-BB_NUMBER_THREADS'><filename>BB_NUMBER_THREADS</filename></link>
+                    <link linkend='var-bb-BB_NUMBER_THREADS'><filename>BB_NUMBER_THREADS</filename></link>
                     variable but is task-specific.</para>
 
                     <para>Set the value globally.
@@ -1971,9 +2044,9 @@
                 <listitem><para><emphasis><filename>[rdepends]</filename>:</emphasis>
                     Controls inter-task runtime dependencies.
                     See the
-                    <link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link>
+                    <link linkend='var-bb-RDEPENDS'><filename>RDEPENDS</filename></link>
                     variable, the
-                    <link linkend='var-RRECOMMENDS'><filename>RRECOMMENDS</filename></link>
+                    <link linkend='var-bb-RRECOMMENDS'><filename>RRECOMMENDS</filename></link>
                     variable, and the
                     "<link linkend='inter-task-dependencies'>Inter-Task Dependencies</link>"
                     section for more information.
@@ -1981,9 +2054,9 @@
                 <listitem><para><emphasis><filename>[rdeptask]</filename>:</emphasis>
                     Controls task runtime dependencies.
                     See the
-                    <link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link>
+                    <link linkend='var-bb-RDEPENDS'><filename>RDEPENDS</filename></link>
                     variable, the
-                    <link linkend='var-RRECOMMENDS'><filename>RRECOMMENDS</filename></link>
+                    <link linkend='var-bb-RRECOMMENDS'><filename>RRECOMMENDS</filename></link>
                     variable, and the
                     "<link linkend='runtime-dependencies'>Runtime Dependencies</link>"
                     section for more information.
@@ -1996,9 +2069,9 @@
                 <listitem><para><emphasis><filename>[recrdeptask]</filename>:</emphasis>
                     Controls task recursive runtime dependencies.
                     See the
-                    <link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link>
+                    <link linkend='var-bb-RDEPENDS'><filename>RDEPENDS</filename></link>
                     variable, the
-                    <link linkend='var-RRECOMMENDS'><filename>RRECOMMENDS</filename></link>
+                    <link linkend='var-bb-RRECOMMENDS'><filename>RRECOMMENDS</filename></link>
                     variable, and the
                     "<link linkend='recursive-dependencies'>Recursive Dependencies</link>"
                     section for more information.
@@ -2127,7 +2200,7 @@
                     Any given datastore only has one such event executed
                     against it, however.
                     If
-                    <link linkende='var-BB_INVALIDCONF'><filename>BB_INVALIDCONF</filename></link>
+                    <link linkende='var-bb-BB_INVALIDCONF'><filename>BB_INVALIDCONF</filename></link>
                     is set in the datastore by the event handler, the
                     configuration is reparsed and a new event triggered,
                     allowing the metadata to update configuration.
@@ -2256,17 +2329,17 @@
             from a single recipe file multiple incarnations of that
             recipe file where all incarnations are buildable.
             These features are enabled through the
-            <link linkend='var-BBCLASSEXTEND'><filename>BBCLASSEXTEND</filename></link>
+            <link linkend='var-bb-BBCLASSEXTEND'><filename>BBCLASSEXTEND</filename></link>
             and
-            <link linkend='var-BBVERSIONS'><filename>BBVERSIONS</filename></link>
+            <link linkend='var-bb-BBVERSIONS'><filename>BBVERSIONS</filename></link>
             variables.
             <note>
                 The mechanism for this class extension is extremely
                 specific to the implementation.
                 Usually, the recipe's
-                <link linkend='var-PROVIDES'><filename>PROVIDES</filename></link>,
-                <link linkend='var-PN'><filename>PN</filename></link>, and
-                <link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
+                <link linkend='var-bb-PROVIDES'><filename>PROVIDES</filename></link>,
+                <link linkend='var-bb-PN'><filename>PN</filename></link>, and
+                <link linkend='var-bb-DEPENDS'><filename>DEPENDS</filename></link>
                 variables would need to be modified by the extension class.
                 For specific examples, see the OE-Core
                 <filename>native</filename>, <filename>nativesdk</filename>,
@@ -2287,7 +2360,7 @@
                     project from a single recipe file.
                     You can also specify conditional metadata
                     (using the
-                    <link linkend='var-OVERRIDES'><filename>OVERRIDES</filename></link>
+                    <link linkend='var-bb-OVERRIDES'><filename>OVERRIDES</filename></link>
                     mechanism) for a single version, or an optionally named range of versions.
                     Here is an example:
                     <literallayout class='monospaced'>
@@ -2306,7 +2379,7 @@
                     into overrides, but it is also made available for the metadata to use
                     in the variable that defines the base recipe versions for use in
                     <filename>file://</filename> search paths
-                    (<link linkend='var-FILESPATH'><filename>FILESPATH</filename></link>).
+                    (<link linkend='var-bb-FILESPATH'><filename>FILESPATH</filename></link>).
                     </para></listitem>
             </itemizedlist>
         </para>
@@ -2408,7 +2481,7 @@
 
             <para>
                 BitBake uses the
-                <link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
+                <link linkend='var-bb-DEPENDS'><filename>DEPENDS</filename></link>
                 variable to manage build time dependencies.
                 The <filename>[deptask]</filename> varflag for tasks
                 signifies the task of each
@@ -2429,9 +2502,9 @@
 
             <para>
                 BitBake uses the
-                <link linkend='var-PACKAGES'><filename>PACKAGES</filename></link>,
-                <link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link>, and
-                <link linkend='var-RRECOMMENDS'><filename>RRECOMMENDS</filename></link>
+                <link linkend='var-bb-PACKAGES'><filename>PACKAGES</filename></link>,
+                <link linkend='var-bb-RDEPENDS'><filename>RDEPENDS</filename></link>, and
+                <link linkend='var-bb-RRECOMMENDS'><filename>RRECOMMENDS</filename></link>
                 variables to manage runtime dependencies.
             </para>
 
@@ -2686,7 +2759,7 @@
 
         <para>
             These checksums are stored in
-            <link linkend='var-STAMP'><filename>STAMP</filename></link>.
+            <link linkend='var-bb-STAMP'><filename>STAMP</filename></link>.
             You can examine the checksums using the following BitBake command:
             <literallayout class='monospaced'>
      $ bitbake-dumpsigs
@@ -2708,44 +2781,44 @@
             The following list describes related variables:
             <itemizedlist>
                 <listitem><para>
-                    <link linkend='var-BB_HASHCHECK_FUNCTION'><filename>BB_HASHCHECK_FUNCTION</filename></link>:
+                    <link linkend='var-bb-BB_HASHCHECK_FUNCTION'><filename>BB_HASHCHECK_FUNCTION</filename></link>:
                     Specifies the name of the function to call during
                     the "setscene" part of the task's execution in order
                     to validate the list of task hashes.
                     </para></listitem>
                 <listitem><para>
-                    <link linkend='var-BB_SETSCENE_DEPVALID'><filename>BB_SETSCENE_DEPVALID</filename></link>:
+                    <link linkend='var-bb-BB_SETSCENE_DEPVALID'><filename>BB_SETSCENE_DEPVALID</filename></link>:
                     Specifies a function BitBake calls that determines
                     whether BitBake requires a setscene dependency to
                     be met.
                     </para></listitem>
                 <listitem><para>
-                    <link linkend='var-BB_SETSCENE_VERIFY_FUNCTION2'><filename>BB_SETSCENE_VERIFY_FUNCTION2</filename></link>:
+                    <link linkend='var-bb-BB_SETSCENE_VERIFY_FUNCTION2'><filename>BB_SETSCENE_VERIFY_FUNCTION2</filename></link>:
                     Specifies a function to call that verifies the list of
                     planned task execution before the main task execution
                     happens.
                     </para></listitem>
                 <listitem><para>
-                    <link linkend='var-BB_STAMP_POLICY'><filename>BB_STAMP_POLICY</filename></link>:
+                    <link linkend='var-bb-BB_STAMP_POLICY'><filename>BB_STAMP_POLICY</filename></link>:
                     Defines the mode for comparing timestamps of stamp files.
                     </para></listitem>
                 <listitem><para>
-                    <link linkend='var-BB_STAMP_WHITELIST'><filename>BB_STAMP_WHITELIST</filename></link>:
+                    <link linkend='var-bb-BB_STAMP_WHITELIST'><filename>BB_STAMP_WHITELIST</filename></link>:
                     Lists stamp files that are looked at when the stamp policy
                     is "whitelist".
                     </para></listitem>
                 <listitem><para>
-                    <link linkend='var-BB_TASKHASH'><filename>BB_TASKHASH</filename></link>:
+                    <link linkend='var-bb-BB_TASKHASH'><filename>BB_TASKHASH</filename></link>:
                     Within an executing task, this variable holds the hash
                     of the task as returned by the currently enabled
                     signature generator.
                     </para></listitem>
                 <listitem><para>
-                    <link linkend='var-STAMP'><filename>STAMP</filename></link>:
+                    <link linkend='var-bb-STAMP'><filename>STAMP</filename></link>:
                     The base path to create stamp files.
                     </para></listitem>
                 <listitem><para>
-                    <link linkend='var-STAMPCLEAN'><filename>STAMPCLEAN</filename></link>:
+                    <link linkend='var-bb-STAMPCLEAN'><filename>STAMPCLEAN</filename></link>:
                     Again, the base path to create stamp files but can use wildcards
                     for matching a range of files for clean operations.
                     </para></listitem>

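To make the "applied at variable expansion time" wording above concrete, here is a small sketch using the manual's own operator example; the comment states the expected result assuming no other assignments touch B:

     B = "bval"
     B_append = " additional data"
     # The append is not folded in at parse time; when ${B} is later
     # expanded it yields "bval additional data".
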
+ 137 - 137
bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.xml

File diff suppressed because it is too large

+ 0 - 8
bitbake/doc/poky.ent

@@ -17,13 +17,6 @@
 <!ENTITY OE_DOCS_URL "http://docs.openembedded.org">
 <!ENTITY OH_HOME_URL "http://o-hand.com">
 <!ENTITY BITBAKE_HOME_URL "http://developer.berlios.de/projects/bitbake/">
-<!ENTITY ECLIPSE_MAIN_URL "http://www.eclipse.org/downloads">
-<!ENTITY ECLIPSE_DL_URL "http://download.eclipse.org">
-<!ENTITY ECLIPSE_DL_PLUGIN_URL "&YOCTO_DL_URL;/releases/eclipse-plugin/&DISTRO;">
-<!ENTITY ECLIPSE_UPDATES_URL "&ECLIPSE_DL_URL;/tm/updates/3.3">
-<!ENTITY ECLIPSE_INDIGO_URL "&ECLIPSE_DL_URL;/releases/indigo">
-<!ENTITY ECLIPSE_JUNO_URL "&ECLIPSE_DL_URL;/releases/juno">
-<!ENTITY ECLIPSE_INDIGO_CDT_URL "&ECLIPSE_DL_URL;tools/cdt/releases/indigo">
 <!ENTITY YOCTO_DOCS_URL "&YOCTO_HOME_URL;/docs">
 <!ENTITY YOCTO_SOURCES_URL "&YOCTO_HOME_URL;/sources/">
 <!ENTITY YOCTO_AB_PORT_URL "&YOCTO_AB_URL;:8010">
@@ -31,7 +24,6 @@
 <!ENTITY YOCTO_POKY_URL "&YOCTO_DL_URL;/releases/poky/">
 <!ENTITY YOCTO_RELEASE_DL_URL "&YOCTO_DL_URL;/releases/yocto/yocto-&DISTRO;">
 <!ENTITY YOCTO_TOOLCHAIN_DL_URL "&YOCTO_RELEASE_DL_URL;/toolchain/">
-<!ENTITY YOCTO_ECLIPSE_DL_URL "&YOCTO_RELEASE_DL_URL;/eclipse-plugin/indigo;">
 <!ENTITY YOCTO_ADTINSTALLER_DL_URL "&YOCTO_RELEASE_DL_URL;/adt_installer">
 <!ENTITY YOCTO_POKY_DL_URL "&YOCTO_RELEASE_DL_URL;/&YOCTO_POKY;.tar.bz2">
 <!ENTITY YOCTO_MACHINES_DL_URL "&YOCTO_RELEASE_DL_URL;/machines">

+ 0 - 15
bitbake/lib/bb/COW.py

@@ -1,23 +1,8 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 #
 # This is a copy on write dictionary and set which abuses classes to try and be nice and fast.
 #
 # Copyright (C) 2006 Tim Ansell
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
 #Please Note:
 # Be careful when using mutable types (ie Dict and Lists) - operations involving these are SLOW.
 # Assign a file to __warn__ to get warnings about slow operations.

+ 2 - 14
bitbake/lib/bb/__init__.py

@@ -1,5 +1,3 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 #
 # BitBake Build System Python Library
 #
@@ -8,20 +6,10 @@
 #
 # Based on Gentoo's portage.py.
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
-__version__ = "1.40.0"
+__version__ = "1.44.0"
 
 import sys
 if sys.version_info < (3, 4, 0):

+ 78 - 73
bitbake/lib/bb/build.py

@@ -1,5 +1,3 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 #
 # BitBake 'Build' implementation
 #
@@ -10,18 +8,7 @@
 #
 # Based on Gentoo's portage.py.
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# SPDX-License-Identifier: GPL-2.0-only
 #
 # Based on functions from the base bb module, Copyright 2003 Holger Schurig
 
@@ -67,23 +54,6 @@ else:
 builtins['bb'] = bb
 builtins['os'] = os
 
-class FuncFailed(Exception):
-    def __init__(self, name = None, logfile = None):
-        self.logfile = logfile
-        self.name = name
-        if name:
-            self.msg = 'Function failed: %s' % name
-        else:
-            self.msg = "Function failed"
-
-    def __str__(self):
-        if self.logfile and os.path.exists(self.logfile):
-            msg = ("%s (log file is located at %s)" %
-                   (self.msg, self.logfile))
-        else:
-            msg = self.msg
-        return msg
-
 class TaskBase(event.Event):
     """Base class for task events"""
 
@@ -176,15 +146,33 @@ class LogTee(object):
 
     def __repr__(self):
         return '<LogTee {0}>'.format(self.name)
+
     def flush(self):
         self.outfile.flush()
 
-#
-# pythonexception allows the python exceptions generated to be raised
-# as the real exceptions (not FuncFailed) and without a backtrace at the 
-# origin of the failure.
-#
-def exec_func(func, d, dirs = None, pythonexception=False):
+
+class StdoutNoopContextManager:
+    """
+    This class acts like sys.stdout, but adds noop __enter__ and __exit__ methods.
+    """
+    def __enter__(self):
+        return sys.stdout
+
+    def __exit__(self, *exc_info):
+        pass
+
+    def write(self, string):
+        return sys.stdout.write(string)
+
+    def flush(self):
+        sys.stdout.flush()
+
+    @property
+    def name(self):
+        return sys.stdout.name
+
+
+def exec_func(func, d, dirs = None):
     """Execute a BB 'function'"""
 
     try:
@@ -256,7 +244,7 @@ def exec_func(func, d, dirs = None, pythonexception=False):
 
     with bb.utils.fileslocked(lockfiles):
         if ispython:
-            exec_func_python(func, d, runfile, cwd=adir, pythonexception=pythonexception)
+            exec_func_python(func, d, runfile, cwd=adir)
         else:
             exec_func_shell(func, d, runfile, cwd=adir)
 
@@ -276,7 +264,7 @@ _functionfmt = """
 {function}(d)
 """
 logformatter = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
-def exec_func_python(func, d, runfile, cwd=None, pythonexception=False):
+def exec_func_python(func, d, runfile, cwd=None):
     """Execute a python BB 'function'"""
 
     code = _functionfmt.format(function=func)
@@ -301,13 +289,7 @@ def exec_func_python(func, d, runfile, cwd=None, pythonexception=False):
         bb.methodpool.insert_method(func, text, fn, lineno - 1)
 
         comp = utils.better_compile(code, func, "exec_python_func() autogenerated")
-        utils.better_exec(comp, {"d": d}, code, "exec_python_func() autogenerated", pythonexception=pythonexception)
-    except (bb.parse.SkipRecipe, bb.build.FuncFailed):
-        raise
-    except:
-        if pythonexception:
-            raise
-        raise FuncFailed(func, None)
+        utils.better_exec(comp, {"d": d}, code, "exec_python_func() autogenerated")
     finally:
         bb.debug(2, "Python function %s finished" % func)
 
@@ -335,6 +317,42 @@ trap 'bb_exit_handler' 0
 set -e
 '''
 
+def create_progress_handler(func, progress, logfile, d):
+    if progress == 'percent':
+        # Use default regex
+        return bb.progress.BasicProgressHandler(d, outfile=logfile)
+    elif progress.startswith('percent:'):
+        # Use specified regex
+        return bb.progress.BasicProgressHandler(d, regex=progress.split(':', 1)[1], outfile=logfile)
+    elif progress.startswith('outof:'):
+        # Use specified regex
+        return bb.progress.OutOfProgressHandler(d, regex=progress.split(':', 1)[1], outfile=logfile)
+    elif progress.startswith("custom:"):
+        # Use a custom progress handler that was injected via OE_EXTRA_IMPORTS or __builtins__
+        import functools
+        from types import ModuleType
+
+        parts = progress.split(":", 2)
+        _, cls, otherargs = parts[0], parts[1], (parts[2] or None) if parts[2:] else None
+        if cls:
+            def resolve(x, y):
+                if not x:
+                    return None
+                if isinstance(x, ModuleType):
+                    return getattr(x, y, None)
+                return x.get(y)
+            cls_obj = functools.reduce(resolve, cls.split("."), bb.utils._context)
+            if not cls_obj:
+                # Fall-back on __builtins__
+                cls_obj = functools.reduce(lambda x, y: x.get(y), cls.split("."), __builtins__)
+            if cls_obj:
+                return cls_obj(d, outfile=logfile, otherargs=otherargs)
+            bb.warn('%s: unknown custom progress handler in task progress varflag value "%s", ignoring' % (func, cls))
+    else:
+        bb.warn('%s: invalid task progress varflag value "%s", ignoring' % (func, progress))
+
+    return logfile
+
 def exec_func_shell(func, d, runfile, cwd=None):
     """Execute a shell function from the metadata
 
@@ -372,23 +390,13 @@ exit $ret
             cmd = [fakerootcmd, runfile]
 
     if bb.msg.loggerDefaultVerbose:
-        logfile = LogTee(logger, sys.stdout)
+        logfile = LogTee(logger, StdoutNoopContextManager())
     else:
-        logfile = sys.stdout
+        logfile = StdoutNoopContextManager()
 
     progress = d.getVarFlag(func, 'progress')
     if progress:
-        if progress == 'percent':
-            # Use default regex
-            logfile = bb.progress.BasicProgressHandler(d, outfile=logfile)
-        elif progress.startswith('percent:'):
-            # Use specified regex
-            logfile = bb.progress.BasicProgressHandler(d, regex=progress.split(':', 1)[1], outfile=logfile)
-        elif progress.startswith('outof:'):
-            # Use specified regex
-            logfile = bb.progress.OutOfProgressHandler(d, regex=progress.split(':', 1)[1], outfile=logfile)
-        else:
-            bb.warn('%s: invalid task progress varflag value "%s", ignoring' % (func, progress))
+        logfile = create_progress_handler(func, progress, logfile, d)
 
     fifobuffer = bytearray()
     def readfifo(data):
@@ -407,6 +415,8 @@ exit $ret
                     bb.plain(value)
                 elif cmd == 'bbnote':
                     bb.note(value)
+                elif cmd == 'bbverbnote':
+                    bb.verbnote(value)
                 elif cmd == 'bbwarn':
                     bb.warn(value)
                 elif cmd == 'bberror':
@@ -436,13 +446,8 @@ exit $ret
     with open(fifopath, 'r+b', buffering=0) as fifo:
         try:
             bb.debug(2, "Executing shell function %s" % func)
-
-            try:
-                with open(os.devnull, 'r+') as stdin:
-                    bb.process.run(cmd, shell=False, stdin=stdin, log=logfile, extrafiles=[(fifo,readfifo)])
-            except bb.process.CmdError:
-                logfn = d.getVar('BB_LOGFILE')
-                raise FuncFailed(func, logfn)
+            with open(os.devnull, 'r+') as stdin, logfile:
+                bb.process.run(cmd, shell=False, stdin=stdin, log=logfile, extrafiles=[(fifo,readfifo)])
         finally:
             os.unlink(fifopath)
 
@@ -570,9 +575,6 @@ def _exec_task(fn, task, d, quieterr):
             event.fire(TaskStarted(task, logfn, flags, localdata), localdata)
         except (bb.BBHandledException, SystemExit):
             return 1
-        except FuncFailed as exc:
-            logger.error(str(exc))
-            return 1
 
         try:
             for func in (prefuncs or '').split():
@@ -580,7 +582,10 @@ def _exec_task(fn, task, d, quieterr):
             exec_func(task, localdata)
             for func in (postfuncs or '').split():
                 exec_func(func, localdata)
-        except FuncFailed as exc:
+        except bb.BBHandledException:
+            event.fire(TaskFailed(task, logfn, localdata, True), localdata)
+            return 1
+        except Exception as exc:
             if quieterr:
                 event.fire(TaskFailedSilent(task, logfn, localdata), localdata)
             else:
@@ -588,9 +593,6 @@ def _exec_task(fn, task, d, quieterr):
                 logger.error(str(exc))
                 event.fire(TaskFailed(task, logfn, localdata, errprinted), localdata)
             return 1
-        except bb.BBHandledException:
-            event.fire(TaskFailed(task, logfn, localdata, True), localdata)
-            return 1
     finally:
         sys.stdout.flush()
         sys.stderr.flush()
@@ -814,6 +816,9 @@ def add_tasks(tasklist, d):
         task_deps['parents'][task] = []
         if 'deps' in flags:
             for dep in flags['deps']:
+                # Check and warn for "addtask task after foo" when foo does not exist
+                #if not dep in tasklist:
+                #    bb.warn('%s: dependent task %s for %s does not exist' % (d.getVar('PN'), dep, task))
                 dep = d.expand(dep)
                 task_deps['parents'][task].append(dep)
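
As an illustration of the refactored progress handling above, here is a minimal standalone sketch (not BitBake code) of how a task's "progress" varflag value is split into a scheme and its argument, mirroring create_progress_handler(); the sample value is hypothetical:

def split_progress(progress):
    # Mirrors the dispatch in create_progress_handler() above.
    if progress == 'percent':
        return ('percent', None)                       # default regex
    if progress.startswith('percent:'):
        return ('percent', progress.split(':', 1)[1])  # caller-supplied regex
    if progress.startswith('outof:'):
        return ('outof', progress.split(':', 1)[1])    # caller-supplied regex
    if progress.startswith('custom:'):
        parts = progress.split(':', 2)                 # custom:<dotted.Class>[:<otherargs>]
        return ('custom', (parts[1], parts[2] if len(parts) > 2 else None))
    return (None, None)                                # invalid value: warned about and ignored

print(split_progress(r'outof:^(\d+) of (\d+)$'))
# ('outof', '^(\\d+) of (\\d+)$')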
 

+ 78 - 31
bitbake/lib/bb/cache.py

@@ -1,5 +1,3 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 #
 # BitBake Cache implementation
 #
@@ -15,18 +13,8 @@
 # Copyright (C) 2005        Holger Hans Peter Freyther
 # Copyright (C) 2005        ROAD GmbH
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 import os
 import sys
@@ -95,21 +83,21 @@ class CoreRecipeInfo(RecipeInfoCommon):
         self.appends = self.listvar('__BBAPPEND', metadata)
         self.nocache = self.getvar('BB_DONT_CACHE', metadata)
 
+        self.provides  = self.depvar('PROVIDES', metadata)
+        self.rprovides = self.depvar('RPROVIDES', metadata)
+        self.pn = self.getvar('PN', metadata) or bb.parse.vars_from_file(filename,metadata)[0]
+        self.packages = self.listvar('PACKAGES', metadata)
+        if not self.packages:
+            self.packages.append(self.pn)
+        self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
+
         self.skipreason = self.getvar('__SKIPPED', metadata)
         if self.skipreason:
-            self.pn = self.getvar('PN', metadata) or bb.parse.BBHandler.vars_from_file(filename,metadata)[0]
             self.skipped = True
-            self.provides  = self.depvar('PROVIDES', metadata)
-            self.rprovides = self.depvar('RPROVIDES', metadata)
             return
 
         self.tasks = metadata.getVar('__BBTASKS', False)
 
-        self.pn = self.getvar('PN', metadata)
-        self.packages = self.listvar('PACKAGES', metadata)
-        if not self.packages:
-            self.packages.append(self.pn)
-
         self.basetaskhashes = self.taskvar('BB_BASEHASH', self.tasks, metadata)
         self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)
 
@@ -125,11 +113,8 @@ class CoreRecipeInfo(RecipeInfoCommon):
         self.stampclean = self.getvar('STAMPCLEAN', metadata)
         self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
         self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
-        self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
         self.depends          = self.depvar('DEPENDS', metadata)
-        self.provides         = self.depvar('PROVIDES', metadata)
         self.rdepends         = self.depvar('RDEPENDS', metadata)
-        self.rprovides        = self.depvar('RPROVIDES', metadata)
         self.rrecommends      = self.depvar('RRECOMMENDS', metadata)
         self.rprovides_pkg    = self.pkgvar('RPROVIDES', self.packages, metadata)
         self.rdepends_pkg     = self.pkgvar('RDEPENDS', self.packages, metadata)
@@ -235,7 +220,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
 
         cachedata.hashfn[fn] = self.hashfilename
         for task, taskhash in self.basetaskhashes.items():
-            identifier = '%s.%s' % (fn, task)
+            identifier = '%s:%s' % (fn, task)
             cachedata.basetaskhash[identifier] = taskhash
 
         cachedata.inherits[fn] = self.inherits
@@ -249,7 +234,7 @@ def virtualfn2realfn(virtualfn):
     Convert a virtual file name to a real one + the associated subclass keyword
     """
     mc = ""
-    if virtualfn.startswith('multiconfig:'):
+    if virtualfn.startswith('mc:'):
         elems = virtualfn.split(':')
         mc = elems[1]
         virtualfn = ":".join(elems[2:])
@@ -270,7 +255,7 @@ def realfn2virtual(realfn, cls, mc):
     if cls:
         realfn = "virtual:" + cls + ":" + realfn
     if mc:
-        realfn = "multiconfig:" + mc + ":" + realfn
+        realfn = "mc:" + mc + ":" + realfn
     return realfn
 
 def variant2virtual(realfn, variant):
@@ -279,11 +264,11 @@ def variant2virtual(realfn, variant):
     """
     if variant == "":
         return realfn
-    if variant.startswith("multiconfig:"):
+    if variant.startswith("mc:"):
         elems = variant.split(":")
         if elems[2]:
-            return "multiconfig:" + elems[1] + ":virtual:" + ":".join(elems[2:]) + ":" + realfn
-        return "multiconfig:" + elems[1] + ":" + realfn
+            return "mc:" + elems[1] + ":virtual:" + ":".join(elems[2:]) + ":" + realfn
+        return "mc:" + elems[1] + ":" + realfn
     return "virtual:" + variant + ":" + realfn
 
 def parse_recipe(bb_data, bbfile, appends, mc=''):
@@ -361,7 +346,7 @@ class NoCache(object):
             bb_data = self.databuilder.mcdata[mc].createCopy()
             newstores = parse_recipe(bb_data, bbfile, appends, mc)
             for ns in newstores:
-                datastores["multiconfig:%s:%s" % (mc, ns)] = newstores[ns]
+                datastores["mc:%s:%s" % (mc, ns)] = newstores[ns]
 
         return datastores
 
@@ -411,6 +396,15 @@ class Cache(NoCache):
         else:
             logger.debug(1, "Cache file %s not found, building..." % self.cachefile)
 
+        # We don't use the symlink, it's just for debugging convenience
+        symlink = os.path.join(self.cachedir, "bb_cache.dat")
+        if os.path.exists(symlink):
+            bb.utils.remove(symlink)
+        try:
+            os.symlink(os.path.basename(self.cachefile), symlink)
+        except OSError:
+            pass
+
     def load_cachefile(self):
         cachesize = 0
         previous_progress = 0
@@ -889,3 +883,56 @@ class MultiProcessCache(object):
             p.dump([data, self.__class__.CACHE_VERSION])
 
         bb.utils.unlockfile(glf)
+
+
+class SimpleCache(object):
+    """
+    BitBake multi-process cache implementation
+
+    Used by the codeparser & file checksum caches
+    """
+
+    def __init__(self, version):
+        self.cachefile = None
+        self.cachedata = None
+        self.cacheversion = version
+
+    def init_cache(self, d, cache_file_name=None, defaultdata=None):
+        cachedir = (d.getVar("PERSISTENT_DIR") or
+                    d.getVar("CACHE"))
+        if not cachedir:
+            return defaultdata
+
+        bb.utils.mkdirhier(cachedir)
+        self.cachefile = os.path.join(cachedir,
+                                      cache_file_name or self.__class__.cache_file_name)
+        logger.debug(1, "Using cache in '%s'", self.cachefile)
+
+        glf = bb.utils.lockfile(self.cachefile + ".lock")
+
+        try:
+            with open(self.cachefile, "rb") as f:
+                p = pickle.Unpickler(f)
+                data, version = p.load()
+        except:
+            bb.utils.unlockfile(glf)
+            return defaultdata
+
+        bb.utils.unlockfile(glf)
+
+        if version != self.cacheversion:
+            return defaultdata
+
+        return data
+
+    def save(self, data):
+        if not self.cachefile:
+            return
+
+        glf = bb.utils.lockfile(self.cachefile + ".lock")
+
+        with open(self.cachefile, "wb") as f:
+            p = pickle.Pickler(f, -1)
+            p.dump([data, self.cacheversion])
+
+        bb.utils.unlockfile(glf)
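
For reference, a standalone sketch of the renamed multiconfig prefix handling shown in virtualfn2realfn() above; the "virtual:<class>:" part of the name is left out here and the file name is hypothetical:

def split_mc(virtualfn):
    # Peel off the "mc:<name>:" prefix, as virtualfn2realfn() now does
    # ("multiconfig:" in previous releases).
    mc = ""
    if virtualfn.startswith('mc:'):
        elems = virtualfn.split(':')
        mc = elems[1]
        virtualfn = ":".join(elems[2:])
    return virtualfn, mc

print(split_mc("mc:musl:/path/to/foo_1.0.bb"))
# ('/path/to/foo_1.0.bb', 'musl')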

+ 1 - 13
bitbake/lib/bb/cache_extra.py

@@ -1,5 +1,3 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 #
 # Extra RecipeInfo will be all defined in this file. Currently,
 # Only Hob (Image Creator) Requests some extra fields. So
@@ -12,18 +10,8 @@
 
 # Copyright (C) 2011, Intel Corporation. All rights reserved.
 
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 from bb.cache import RecipeInfoCommon
 

+ 1 - 11
bitbake/lib/bb/checksum.py

@@ -2,18 +2,8 @@
 #
 # Copyright (C) 2012 Intel Corporation
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 import glob
 import operator

+ 9 - 4
bitbake/lib/bb/codeparser.py

@@ -1,3 +1,7 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
 """
 BitBake code parser
 
@@ -33,7 +37,7 @@ from bb.cache import MultiProcessCache
 logger = logging.getLogger('BitBake.CodeParser')
 
 def bbhash(s):
-    return hashlib.md5(s.encode("utf-8")).hexdigest()
+    return hashlib.sha256(s.encode("utf-8")).hexdigest()
 
 def check_indent(codestr):
     """If the code is indented, add a top level piece of code to 'remove' the indentation"""
@@ -140,7 +144,7 @@ class CodeParserCache(MultiProcessCache):
     # so that an existing cache gets invalidated. Additionally you'll need
     # to increment __cache_version__ in cache.py in order to ensure that old
     # recipe caches don't trigger "Taskhash mismatch" errors.
-    CACHE_VERSION = 10
+    CACHE_VERSION = 11
 
     def __init__(self):
         MultiProcessCache.__init__(self)
@@ -368,8 +372,9 @@ class ShellParser():
     def _parse_shell(self, value):
         try:
             tokens, _ = pyshyacc.parse(value, eof=True, debug=False)
-        except pyshlex.NeedMore:
-            raise sherrors.ShellSyntaxError("Unexpected EOF")
+        except Exception:
+            bb.error('Error parsing shell code, the last 5 lines are:\n%s' % '\n'.join(value.split('\n')[-5:]))
+            raise
 
         self.process_tokens(tokens)
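
A quick illustration of the new failure path above: only the last five lines of the offending shell function are echoed before the exception is re-raised (sample content is made up):

value = "\n".join("echo step %d" % i for i in range(1, 11))   # a 10-line shell body
print('\n'.join(value.split('\n')[-5:]))
# prints "echo step 6" through "echo step 10"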
 

+ 1 - 11
bitbake/lib/bb/command.py

@@ -6,18 +6,8 @@ Provide an interface to interact with the bitbake server through 'commands'
 
 # Copyright (C) 2006-2007  Richard Purdie
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 """
 The bitbake server takes 'commands' from its UI/commandline.

+ 4 - 0
bitbake/lib/bb/compat.py

@@ -1,3 +1,7 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
 """Code pulled from future python versions, here for compatibility"""
 
 from collections import MutableMapping, KeysView, ValuesView, ItemsView, OrderedDict

+ 111 - 107
bitbake/lib/bb/cooker.py

@@ -1,6 +1,3 @@
-#!/usr/bin/env python
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 #
 # Copyright (C) 2003, 2004  Chris Larson
 # Copyright (C) 2003, 2004  Phil Blundell
@@ -9,19 +6,8 @@
 # Copyright (C) 2005        ROAD GmbH
 # Copyright (C) 2006 - 2007 Richard Purdie
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
 
 import sys, os, glob, os.path, re, time
 import atexit
@@ -45,6 +31,7 @@ import pyinotify
 import json
 import pickle
 import codecs
+import hashserv
 
 logger      = logging.getLogger("BitBake")
 collectlog  = logging.getLogger("BitBake.Collection")
@@ -175,27 +162,45 @@ class BBCooker:
 
         self.configuration = configuration
 
+        bb.debug(1, "BBCooker starting %s" % time.time())
+        sys.stdout.flush()
+
         self.configwatcher = pyinotify.WatchManager()
+        bb.debug(1, "BBCooker pyinotify1 %s" % time.time())
+        sys.stdout.flush()
+
         self.configwatcher.bbseen = []
         self.configwatcher.bbwatchedfiles = []
         self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
+        bb.debug(1, "BBCooker pyinotify2 %s" % time.time())
+        sys.stdout.flush()
         self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
                          pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
                          pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
         self.watcher = pyinotify.WatchManager()
+        bb.debug(1, "BBCooker pyinotify3 %s" % time.time())
+        sys.stdout.flush()
         self.watcher.bbseen = []
         self.watcher.bbwatchedfiles = []
         self.notifier = pyinotify.Notifier(self.watcher, self.notifications)
 
+        bb.debug(1, "BBCooker pyinotify complete %s" % time.time())
+        sys.stdout.flush()
+
         # If being called by something like tinfoil, we need to clean cached data
         # which may now be invalid
         bb.parse.clear_cache()
         bb.parse.BBHandler.cached_statements = {}
 
         self.ui_cmdline = None
+        self.hashserv = None
+        self.hashservaddr = None
 
         self.initConfigurationData()
 
+        bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
+        sys.stdout.flush()
+
         # we log all events to a file if so directed
         if self.configuration.writeeventlog:
             # register the log file writer as UI Handler
@@ -233,6 +238,9 @@ class BBCooker:
         # Let SIGHUP exit as SIGTERM
         signal.signal(signal.SIGHUP, self.sigterm_exception)
 
+        bb.debug(1, "BBCooker startup complete %s" % time.time())
+        sys.stdout.flush()
+
     def process_inotify_updates(self):
         for n in [self.confignotifier, self.notifier]:
             if n.check_events(timeout=0):
@@ -367,13 +375,12 @@ class BBCooker:
         # Copy of the data store which has been expanded.
         # Used for firing events and accessing variables where expansion needs to be accounted for
         #
-        bb.parse.init_parser(self.data)
-
         if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
             self.disableDataTracking()
 
-        self.data.renameVar("__depends", "__base_depends")
-        self.add_filewatch(self.data.getVar("__base_depends", False), self.configwatcher)
+        for mc in self.databuilder.mcdata.values():
+            mc.renameVar("__depends", "__base_depends")
+            self.add_filewatch(mc.getVar("__base_depends", False), self.configwatcher)
 
         self.baseconfig_valid = True
         self.parsecache_valid = False
@@ -385,6 +392,22 @@ class BBCooker:
         except prserv.serv.PRServiceConfigError as e:
             bb.fatal("Unable to start PR Server, exitting")
 
+        if self.data.getVar("BB_HASHSERVE") == "auto":
+            # Create a new hash server bound to a unix domain socket
+            if not self.hashserv:
+                dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
+                self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
+                self.hashserv = hashserv.create_server(self.hashservaddr, dbfile, sync=False)
+                self.hashserv.process = multiprocessing.Process(target=self.hashserv.serve_forever)
+                self.hashserv.process.start()
+            self.data.setVar("BB_HASHSERVE", self.hashservaddr)
+            self.databuilder.origdata.setVar("BB_HASHSERVE", self.hashservaddr)
+            self.databuilder.data.setVar("BB_HASHSERVE", self.hashservaddr)
+            for mc in self.databuilder.mcdata:
+                self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)
+
+        bb.parse.init_parser(self.data)
+
     def enableDataTracking(self):
         self.configuration.tracking = True
         if hasattr(self, "data"):
@@ -488,6 +511,7 @@ class BBCooker:
         """
         fn = None
         envdata = None
+        mc = ''
         if not pkgs_to_build:
             pkgs_to_build = []
 
@@ -496,6 +520,12 @@ class BBCooker:
             self.enableDataTracking()
             self.reset()
 
+        def mc_base(p):
+            if p.startswith('mc:'):
+                s = p.split(':')
+                if len(s) == 2:
+                    return s[1]
+            return None
 
         if buildfile:
             # Parse the configuration here. We need to do it explicitly here since
@@ -506,18 +536,16 @@ class BBCooker:
             fn = self.matchFile(fn)
             fn = bb.cache.realfn2virtual(fn, cls, mc)
         elif len(pkgs_to_build) == 1:
-            ignore = self.data.getVar("ASSUME_PROVIDED") or ""
-            if pkgs_to_build[0] in set(ignore.split()):
-                bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])
+            mc = mc_base(pkgs_to_build[0])
+            if not mc:
+                ignore = self.data.getVar("ASSUME_PROVIDED") or ""
+                if pkgs_to_build[0] in set(ignore.split()):
+                    bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])
 
-            taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)
+                taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)
 
-            mc = runlist[0][0]
-            fn = runlist[0][3]
-        else:
-            envdata = self.data
-            data.expandKeys(envdata)
-            parse.ast.runAnonFuncs(envdata)
+                mc = runlist[0][0]
+                fn = runlist[0][3]
 
         if fn:
             try:
@@ -526,6 +554,12 @@ class BBCooker:
             except Exception as e:
                 parselog.exception("Unable to read %s", fn)
                 raise
+        else:
+            if not mc in self.databuilder.mcdata:
+                bb.fatal('No multiconfig named "%s" found' % mc)
+            envdata = self.databuilder.mcdata[mc]
+            data.expandKeys(envdata)
+            parse.ast.runAnonFuncs(envdata)
 
         # Display history
         with closing(StringIO()) as env:
@@ -565,10 +599,10 @@ class BBCooker:
         wildcard = False
 
         # Wild card expansion:
-        # Replace string such as "multiconfig:*:bash"
-        # into "multiconfig:A:bash multiconfig:B:bash bash"
+        # Replace string such as "mc:*:bash"
+        # into "mc:A:bash mc:B:bash bash"
         for k in targetlist:
-            if k.startswith("multiconfig:"):
+            if k.startswith("mc:"):
                 if wildcard:
                     bb.fatal('multiconfig conflict')
                 if k.split(":")[1] == "*":
@@ -601,7 +635,7 @@ class BBCooker:
         runlist = []
         for k in fulltargetlist:
             mc = ""
-            if k.startswith("multiconfig:"):
+            if k.startswith("mc:"):
                 mc = k.split(":")[1]
                 k = ":".join(k.split(":")[2:])
             ktask = task
@@ -620,13 +654,22 @@ class BBCooker:
             runlist.append([mc, k, ktask, fn])
             bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)
 
-        mcdeps = taskdata[mc].get_mcdepends()
+        havemc = False
+        for mc in self.multiconfigs:
+            if taskdata[mc].get_mcdepends():
+                havemc = True
+
         # No need to do check providers if there are no mcdeps or not an mc build
-        if mcdeps and mc:
-            # Make sure we can provide the multiconfig dependency
+        if havemc or len(self.multiconfigs) > 1:
             seen = set()
             new = True
+            # Make sure we can provide the multiconfig dependency
             while new:
+                mcdeps = set()
+                # Add unresolved first, so we can get multiconfig indirect dependencies in time
+                for mc in self.multiconfigs:
+                    taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
+                    mcdeps |= set(taskdata[mc].get_mcdepends())
                 new = False
                 for mc in self.multiconfigs:
                     for k in mcdeps:
@@ -641,6 +684,7 @@ class BBCooker:
                             taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
                             seen.add(k)
                             new = True
+
         for mc in self.multiconfigs:
             taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
 
@@ -676,7 +720,7 @@ class BBCooker:
     @staticmethod
     def add_mc_prefix(mc, pn):
         if mc:
-            return "multiconfig:%s:%s" % (mc, pn)
+            return "mc:%s:%s" % (mc, pn)
         return pn
 
     def buildDependTree(self, rq, taskdata):
@@ -875,6 +919,10 @@ class BBCooker:
             os.unlink('package-depends.dot')
         except FileNotFoundError:
             pass
+        try:
+            os.unlink('recipe-depends.dot')
+        except FileNotFoundError:
+            pass
 
         with open('task-depends.dot', 'w') as f:
             f.write("digraph depends {\n")
@@ -888,27 +936,6 @@ class BBCooker:
             f.write("}\n")
         logger.info("Task dependencies saved to 'task-depends.dot'")
 
-        with open('recipe-depends.dot', 'w') as f:
-            f.write("digraph depends {\n")
-            pndeps = {}
-            for task in sorted(depgraph["tdepends"]):
-                (pn, taskname) = task.rsplit(".", 1)
-                if pn not in pndeps:
-                    pndeps[pn] = set()
-                for dep in sorted(depgraph["tdepends"][task]):
-                    (deppn, deptaskname) = dep.rsplit(".", 1)
-                    pndeps[pn].add(deppn)
-            for pn in sorted(pndeps):
-                fn = depgraph["pn"][pn]["filename"]
-                version = depgraph["pn"][pn]["version"]
-                f.write('"%s" [label="%s\\n%s\\n%s"]\n' % (pn, pn, version, fn))
-                for dep in sorted(pndeps[pn]):
-                    if dep == pn:
-                        continue
-                    f.write('"%s" -> "%s"\n' % (pn, dep))
-            f.write("}\n")
-        logger.info("Flattened recipe dependencies saved to 'recipe-depends.dot'")
-
     def show_appends_with_no_recipes(self):
         # Determine which bbappends haven't been applied
 
@@ -1191,8 +1218,8 @@ class BBCooker:
                     continue
                 elif regex == "":
                     parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c)
+                    cre = re.compile('^NULL$')
                     errors = False
-                    continue
                 else:
                     try:
                         cre = re.compile(regex)
@@ -1453,7 +1480,7 @@ class BBCooker:
         ntargets = []
         for target in runlist:
             if target[0]:
-                ntargets.append("multiconfig:%s:%s:%s" % (target[0], target[1], target[2]))
+                ntargets.append("mc:%s:%s:%s" % (target[0], target[1], target[2]))
             ntargets.append("%s:%s" % (target[1], target[2]))
 
         for mc in self.multiconfigs:
@@ -1576,6 +1603,9 @@ class BBCooker:
         for pkg in pkgs_to_build:
             if pkg in ignore:
                 parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
+            if pkg.startswith("multiconfig:"):
+                pkgs_to_build.remove(pkg)
+                pkgs_to_build.append(pkg.replace("multiconfig:", "mc:"))
 
         if 'world' in pkgs_to_build:
             pkgs_to_build.remove('world')
@@ -1583,7 +1613,7 @@ class BBCooker:
                 bb.providers.buildWorldTargetList(self.recipecaches[mc], task)
                 for t in self.recipecaches[mc].world_target:
                     if mc:
-                        t = "multiconfig:" + mc + ":" + t
+                        t = "mc:" + mc + ":" + t
                     pkgs_to_build.append(t)
 
         if 'universe' in pkgs_to_build:
@@ -1602,7 +1632,7 @@ class BBCooker:
                             bb.debug(1, "Skipping %s for universe tasks as task %s doesn't exist" % (t, task))
                             continue
                     if mc:
-                        t = "multiconfig:" + mc + ":" + t
+                        t = "mc:" + mc + ":" + t
                     pkgs_to_build.append(t)
 
         return pkgs_to_build
@@ -1615,9 +1645,11 @@ class BBCooker:
 
     def post_serve(self):
         prserv.serv.auto_shutdown()
+        if self.hashserv:
+            self.hashserv.process.terminate()
+            self.hashserv.process.join()
         bb.event.fire(CookerExit(), self.data)
 
-
     def shutdown(self, force = False):
         if force:
             self.state = state.forceshutdown
@@ -1632,6 +1664,7 @@ class BBCooker:
 
     def reset(self):
         self.initConfigurationData()
+        self.handlePRServ()
 
     def clientComplete(self):
         """Called when the client is done using the server"""
@@ -1865,35 +1898,6 @@ class ParsingFailure(Exception):
         self.recipe = recipe
         Exception.__init__(self, realexception, recipe)
 
-class Feeder(multiprocessing.Process):
-    def __init__(self, jobs, to_parsers, quit):
-        self.quit = quit
-        self.jobs = jobs
-        self.to_parsers = to_parsers
-        multiprocessing.Process.__init__(self)
-
-    def run(self):
-        while True:
-            try:
-                quit = self.quit.get_nowait()
-            except queue.Empty:
-                pass
-            else:
-                if quit == 'cancel':
-                    self.to_parsers.cancel_join_thread()
-                break
-
-            try:
-                job = self.jobs.pop()
-            except IndexError:
-                break
-
-            try:
-                self.to_parsers.put(job, timeout=0.5)
-            except queue.Full:
-                self.jobs.insert(0, job)
-                continue
-
 class Parser(multiprocessing.Process):
     def __init__(self, jobs, results, quit, init, profile):
         self.jobs = jobs
@@ -1940,11 +1944,8 @@ class Parser(multiprocessing.Process):
                 result = pending.pop()
             else:
                 try:
-                    job = self.jobs.get(timeout=0.25)
-                except queue.Empty:
-                    continue
-
-                if job is None:
+                    job = self.jobs.pop()
+                except IndexError:
                     break
                 result = self.parse(*job)
 
@@ -2028,14 +2029,15 @@ class CookerParser(object):
                 multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
                 multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
 
-            self.feeder_quit = multiprocessing.Queue(maxsize=1)
             self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
-            self.jobs = multiprocessing.Queue(maxsize=self.num_processes)
             self.result_queue = multiprocessing.Queue()
-            self.feeder = Feeder(self.willparse, self.jobs, self.feeder_quit)
-            self.feeder.start()
+
+            def chunkify(lst,n):
+                return [lst[i::n] for i in range(n)]
+            self.jobs = chunkify(self.willparse, self.num_processes)
+
             for i in range(0, self.num_processes):
-                parser = Parser(self.jobs, self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
+                parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
                 parser.start()
                 self.process_names.append(parser.name)
                 self.processes.append(parser)
@@ -2056,17 +2058,20 @@ class CookerParser(object):
                                             self.total)
 
             bb.event.fire(event, self.cfgdata)
-            self.feeder_quit.put(None)
             for process in self.processes:
                 self.parser_quit.put(None)
         else:
-            self.feeder_quit.put('cancel')
-
             self.parser_quit.cancel_join_thread()
             for process in self.processes:
                 self.parser_quit.put(None)
 
-            self.jobs.cancel_join_thread()
+        # Clean up the queue before calling process.join(), otherwise there might be
+        # deadlocks.
+        while True:
+            try:
+               self.result_queue.get(timeout=0.25)
+            except queue.Empty:
+                break
 
         for process in self.processes:
             if force:
@@ -2074,7 +2079,6 @@ class CookerParser(object):
                 process.terminate()
             else:
                 process.join()
-        self.feeder.join()
 
         sync = threading.Thread(target=self.bb_cache.sync)
         sync.start()
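
The Feeder process and the shared jobs queue are gone; the recipe list is now split up front and each Parser pops from its own slice. The chunkify() helper added above is just a round-robin slice, e.g.:

def chunkify(lst, n):
    # Every n-th element goes to the same parser process.
    return [lst[i::n] for i in range(n)]

recipes = ['a.bb', 'b.bb', 'c.bb', 'd.bb', 'e.bb']   # hypothetical job list
print(chunkify(recipes, 2))
# [['a.bb', 'c.bb', 'e.bb'], ['b.bb', 'd.bb']]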

+ 25 - 19
bitbake/lib/bb/cookerdata.py

@@ -1,6 +1,3 @@
-#!/usr/bin/env python
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 #
 # Copyright (C) 2003, 2004  Chris Larson
 # Copyright (C) 2003, 2004  Phil Blundell
@@ -9,23 +6,14 @@
 # Copyright (C) 2005        ROAD GmbH
 # Copyright (C) 2006        Richard Purdie
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 import logging
 import os
 import re
 import sys
+import hashlib
 from functools import wraps
 import bb
 from bb import data
@@ -134,6 +122,7 @@ class CookerConfiguration(object):
         self.profile = False
         self.nosetscene = False
         self.setsceneonly = False
+        self.skipsetscene = False
         self.invalidate_stamp = False
         self.dump_signatures = []
         self.dry_run = False
@@ -279,12 +268,13 @@ class CookerDataBuilder(object):
         self.mcdata = {}
 
     def parseBaseConfiguration(self):
+        data_hash = hashlib.sha256()
         try:
-            bb.parse.init_parser(self.basedata)
             self.data = self.parseConfigurationFiles(self.prefiles, self.postfiles)
 
             if self.data.getVar("BB_WORKERCONTEXT", False) is None:
                 bb.fetch.fetcher_init(self.data)
+            bb.parse.init_parser(self.data)
             bb.codeparser.parser_cache_init(self.data)
 
             bb.event.fire(bb.event.ConfigParsed(), self.data)
@@ -302,7 +292,7 @@ class CookerDataBuilder(object):
                 bb.event.fire(bb.event.ConfigParsed(), self.data)
 
             bb.parse.init_parser(self.data)
-            self.data_hash = self.data.get_hash()
+            data_hash.update(self.data.get_hash().encode('utf-8'))
             self.mcdata[''] = self.data
 
             multiconfig = (self.data.getVar("BBMULTICONFIG") or "").split()
@@ -310,9 +300,11 @@ class CookerDataBuilder(object):
                 mcdata = self.parseConfigurationFiles(self.prefiles, self.postfiles, config)
                 bb.event.fire(bb.event.ConfigParsed(), mcdata)
                 self.mcdata[config] = mcdata
+                data_hash.update(mcdata.get_hash().encode('utf-8'))
             if multiconfig:
                 bb.event.fire(bb.event.MultiConfigParsed(self.mcdata), self.data)
 
+            self.data_hash = data_hash.hexdigest()
         except (SyntaxError, bb.BBHandledException):
             raise bb.BBHandledException
         except bb.data_smart.ExpansionError as e:
@@ -354,14 +346,24 @@ class CookerDataBuilder(object):
             data = parse_config_file(layerconf, data)
 
             layers = (data.getVar('BBLAYERS') or "").split()
+            broken_layers = []
 
             data = bb.data.createCopy(data)
             approved = bb.utils.approved_variables()
+
+            # Check whether present layer directories exist
             for layer in layers:
                 if not os.path.isdir(layer):
-                    parselog.critical("Layer directory '%s' does not exist! "
-                                      "Please check BBLAYERS in %s" % (layer, layerconf))
-                    sys.exit(1)
+                    broken_layers.append(layer)
+
+            if broken_layers:
+                parselog.critical("The following layer directories do not exist:")
+                for layer in broken_layers:
+                    parselog.critical("   %s", layer)
+                parselog.critical("Please check BBLAYERS in %s" % (layerconf))
+                sys.exit(1)
+
+            for layer in layers:
                 parselog.debug(2, "Adding layer %s", layer)
                 if 'HOME' in approved and '~' in layer:
                     layer = os.path.expanduser(layer)
@@ -391,7 +393,11 @@ class CookerDataBuilder(object):
                 bb.fatal("BBFILES_DYNAMIC entries must be of the form <collection name>:<filename pattern>, not:\n    %s" % "\n    ".join(invalid))
 
             layerseries = set((data.getVar("LAYERSERIES_CORENAMES") or "").split())
+            collections_tmp = collections[:]
             for c in collections:
+                collections_tmp.remove(c)
+                if c in collections_tmp:
+                    bb.fatal("Found duplicated BBFILE_COLLECTIONS '%s', check bblayers.conf or layer.conf to fix it." % c)
                 compat = set((data.getVar("LAYERSERIES_COMPAT_%s" % c) or "").split())
                 if compat and not (compat & layerseries):
                     bb.fatal("Layer %s is not compatible with the core layer which only supports these series: %s (layer is compatible with %s)"

+ 4 - 0
bitbake/lib/bb/daemonize.py

@@ -1,3 +1,7 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
 """
 Python Daemonizing helper
 

+ 4 - 19
bitbake/lib/bb/data.py

@@ -1,5 +1,3 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 """
 BitBake 'Data' implementations
 
@@ -22,18 +20,7 @@ the speed is more critical here.
 # Copyright (C) 2003, 2004  Chris Larson
 # Copyright (C) 2005        Holger Hans Peter Freyther
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# SPDX-License-Identifier: GPL-2.0-only
 #
 # Based on functions from the base bb module, Copyright 2003 Holger Schurig
 
@@ -143,7 +130,7 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):
         if all:
             oval = d.getVar(var, False)
         val = d.getVar(var)
-    except (KeyboardInterrupt, bb.build.FuncFailed):
+    except (KeyboardInterrupt):
         raise
     except Exception as exc:
         o.write('# expansion of %s threw %s: %s\n' % (var, exc.__class__.__name__, str(exc)))
@@ -322,8 +309,6 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
             if varflags.get("python"):
                 value = d.getVarFlag(key, "_content", False)
                 parser = bb.codeparser.PythonParser(key, logger)
-                if value and "\t" in value:
-                    logger.warning("Variable %s contains tabs, please remove these (%s)" % (key, d.getVar("FILE")))
                 parser.parse_python(value, filename=varflags.get("filename"), lineno=varflags.get("lineno"))
                 deps = deps | parser.references
                 deps = deps | (keys & parser.execs)
@@ -437,8 +422,8 @@ def generate_dependency_hash(tasklist, gendeps, lookupcache, whitelist, fn):
             var = lookupcache[dep]
             if var is not None:
                 data = data + str(var)
-        k = fn + "." + task
-        basehash[k] = hashlib.md5(data.encode("utf-8")).hexdigest()
+        k = fn + ":" + task
+        basehash[k] = hashlib.sha256(data.encode("utf-8")).hexdigest()
         taskdeps[task] = alldeps
 
     return taskdeps, basehash
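
Two things change in generate_dependency_hash() above: task identifiers now use "<file>:<task>" instead of "<file>.<task>" (matching the cache.py change earlier in this commit), and the digest is SHA-256 rather than MD5. A minimal sketch with hypothetical values:

import hashlib

fn, task = "/path/to/foo_1.0.bb", "do_compile"        # hypothetical recipe/task
data = "concatenated dependency values"               # hypothetical input
k = fn + ":" + task                                   # was fn + "." + task
print(k, hashlib.sha256(data.encode("utf-8")).hexdigest())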

+ 7 - 18
bitbake/lib/bb/data_smart.py

@@ -1,5 +1,3 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 """
 BitBake Smart Dictionary Implementation
 
@@ -14,18 +12,8 @@ BitBake build tools.
 # Copyright (C) 2005        Uli Luckas
 # Copyright (C) 2005        ROAD GmbH
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 # Based on functions from the base bb module, Copyright 2003 Holger Schurig
 
 import copy, re, sys, traceback
@@ -39,10 +27,11 @@ from bb.COW  import COWDictBase
 logger = logging.getLogger("BitBake.Data")
 
 __setvar_keyword__ = ["_append", "_prepend", "_remove"]
-__setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend|_remove)(_(?P<add>[^A-Z]*))?$')
-__expand_var_regexp__ = re.compile(r"\${[^{}@\n\t :]+}")
+__setvar_regexp__ = re.compile(r'(?P<base>.*?)(?P<keyword>_append|_prepend|_remove)(_(?P<add>[^A-Z]*))?$')
+__expand_var_regexp__ = re.compile(r"\${[a-zA-Z0-9\-_+./~]+?}")
 __expand_python_regexp__ = re.compile(r"\${@.+?}")
-__whitespace_split__ = re.compile('(\s)')
+__whitespace_split__ = re.compile(r'(\s)')
+__override_regexp__ = re.compile(r'[a-z0-9]+')
 
 def infer_caller_details(loginfo, parent = False, varval = True):
     """Save the caller the trouble of specifying everything."""
@@ -597,7 +586,7 @@ class DataSmart(MutableMapping):
         # aka pay the cookie monster
         override = var[var.rfind('_')+1:]
         shortvar = var[:var.rfind('_')]
-        while override and override.islower():
+        while override and __override_regexp__.match(override):
             if shortvar not in self.overridedata:
                 self.overridedata[shortvar] = []
             if [var, override] not in self.overridedata[shortvar]:
@@ -1073,4 +1062,4 @@ class DataSmart(MutableMapping):
                     data.update({i:value})
 
         data_str = str([(k, data[k]) for k in sorted(data.keys())])
-        return hashlib.md5(data_str.encode("utf-8")).hexdigest()
+        return hashlib.sha256(data_str.encode("utf-8")).hexdigest()
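
The variable-reference pattern above is tightened to an explicit character set, so only plain names are picked up as ${...} references while inline-python expansions stay with __expand_python_regexp__; for example:

import re

__expand_var_regexp__ = re.compile(r"\${[a-zA-Z0-9\-_+./~]+?}")
print(__expand_var_regexp__.findall("${PN}-${PV} ${@d.getVar('BAR')}"))
# ['${PN}', '${PV}']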

+ 3 - 30
bitbake/lib/bb/event.py

@@ -1,5 +1,3 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 """
 BitBake 'Event' implementation
 
@@ -9,18 +7,8 @@ BitBake build tools.
 
 # Copyright (C) 2003, 2004  Chris Larson
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 import os, sys
 import warnings
@@ -136,6 +124,7 @@ def fire_class_handlers(event, d):
 ui_queue = []
 @atexit.register
 def print_ui_queue():
+    global ui_queue
     """If we're exiting before a UI has been spawned, display any queued
     LogRecords to the console."""
     logger = logging.getLogger("BitBake")
@@ -180,6 +169,7 @@ def print_ui_queue():
             logger.removeHandler(stderr)
         else:
             logger.removeHandler(stdout)
+        ui_queue = []
 
 def fire_ui_handlers(event, d):
     global _thread_lock
@@ -414,23 +404,6 @@ class RecipeTaskPreProcess(RecipeEvent):
 class RecipeParsed(RecipeEvent):
     """ Recipe Parsing Complete """
 
-class StampUpdate(Event):
-    """Trigger for any adjustment of the stamp files to happen"""
-
-    def __init__(self, targets, stampfns):
-        self._targets = targets
-        self._stampfns = stampfns
-        Event.__init__(self)
-
-    def getStampPrefix(self):
-        return self._stampfns
-
-    def getTargets(self):
-        return self._targets
-
-    stampPrefix = property(getStampPrefix)
-    targets = property(getTargets)
-
 class BuildBase(Event):
     """Base class for bitbake build events"""
 

+ 3 - 0
bitbake/lib/bb/exceptions.py

@@ -1,3 +1,6 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
 
 import inspect
 import traceback

+ 26 - 29
bitbake/lib/bb/fetch2/__init__.py

@@ -1,5 +1,3 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 """
 BitBake 'Fetch' implementations
 
@@ -10,18 +8,7 @@ BitBake build tools.
 # Copyright (C) 2003, 2004  Chris Larson
 # Copyright (C) 2012  Intel Corporation
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# SPDX-License-Identifier: GPL-2.0-only
 #
 # Based on functions from the base bb module, Copyright 2003 Holger Schurig
 
@@ -256,7 +243,7 @@ class URI(object):
 
         # Identify if the URI is relative or not
         if urlp.scheme in self._relative_schemes and \
-           re.compile("^\w+:(?!//)").match(uri):
+           re.compile(r"^\w+:(?!//)").match(uri):
             self.relative = True
 
         if not self.relative:
@@ -524,7 +511,7 @@ def fetcher_parse_save():
 def fetcher_parse_done():
     _checksum_cache.save_merge()
 
-def fetcher_compare_revisions():
+def fetcher_compare_revisions(d):
     """
     Compare the revisions in the persistant cache with current values and
     return true/false on whether they've changed.
@@ -777,7 +764,8 @@ def get_srcrev(d, method_name='sortable_revision'):
     #
     format = d.getVar('SRCREV_FORMAT')
     if not format:
-        raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
+        raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.\n"\
+                         "The SCMs are:\n%s" % '\n'.join(scms))
 
     name_to_rev = {}
     seenautoinc = False
@@ -855,10 +843,18 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
         if val:
             cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
 
+    # Ensure that a _PYTHON_SYSCONFIGDATA_NAME value set by a recipe
+    # (for example via python3native.bbclass since warrior) is not set for
+    # host Python (otherwise tools like git-make-shallow will fail)
+    cmd = 'unset _PYTHON_SYSCONFIGDATA_NAME; ' + cmd
+
     # Disable pseudo as it may affect ssh, potentially causing it to hang.
     cmd = 'export PSEUDO_DISABLED=1; ' + cmd
 
-    logger.debug(1, "Running %s", cmd)
+    if workdir:
+        logger.debug(1, "Running '%s' in %s" % (cmd, workdir))
+    else:
+        logger.debug(1, "Running %s", cmd)
 
     success = False
     error_message = ""
@@ -894,7 +890,7 @@ def check_network_access(d, info, url):
     log remote network access, and error if BB_NO_NETWORK is set or the given
     URI is untrusted
     """
-    if d.getVar("BB_NO_NETWORK") == "1":
+    if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")):
         raise NetworkAccess(url, info)
     elif not trusted_network(d, url):
         raise UntrustedUrl(url, info)
@@ -966,7 +962,8 @@ def rename_bad_checksum(ud, suffix):
 
     new_localpath = "%s_bad-checksum_%s" % (ud.localpath, suffix)
     bb.warn("Renaming %s to %s" % (ud.localpath, new_localpath))
-    bb.utils.movefile(ud.localpath, new_localpath)
+    if not bb.utils.movefile(ud.localpath, new_localpath):
+        bb.warn("Renaming %s to %s failed, grep movefile in log.do_fetch to see why" % (ud.localpath, new_localpath))
 
 
 def try_mirror_url(fetch, origud, ud, ld, check = False):
@@ -1027,7 +1024,7 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
         raise
 
     except IOError as e:
-        if e.errno in [os.errno.ESTALE]:
+        if e.errno in [errno.ESTALE]:
             logger.warning("Stale Error Observed %s." % ud.url)
             return False
         raise
@@ -1094,7 +1091,7 @@ def trusted_network(d, url):
     BB_ALLOWED_NETWORKS is set globally or for a specific recipe.
     Note: modifies SRC_URI & mirrors.
     """
-    if d.getVar('BB_NO_NETWORK') == "1":
+    if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")):
         return True
 
     pkgname = d.expand(d.getVar('PN', False))
@@ -1403,7 +1400,7 @@ class FetchMethod(object):
         Fetch urls
         Assumes localpath was called first
         """
-        raise NoMethodError(url)
+        raise NoMethodError(urldata.url)
 
     def unpack(self, urldata, rootdir, data):
         iterate = False
@@ -1469,7 +1466,7 @@ class FetchMethod(object):
                 else:
                     cmd = 'rpm2cpio.sh %s | cpio -id' % (file)
             elif file.endswith('.deb') or file.endswith('.ipk'):
-                output = subprocess.check_output('ar -t %s' % file, preexec_fn=subprocess_setup, shell=True)
+                output = subprocess.check_output(['ar', '-t', file], preexec_fn=subprocess_setup)
                 datafile = None
                 if output:
                     for line in output.decode().splitlines():
@@ -1547,7 +1544,7 @@ class FetchMethod(object):
         Check the status of a URL
         Assumes localpath was called first
         """
-        logger.info("URL %s could not be checked for status since no method exists.", url)
+        logger.info("URL %s could not be checked for status since no method exists.", urldata.url)
         return True
 
     def latest_revision(self, ud, d, name):
@@ -1555,7 +1552,7 @@ class FetchMethod(object):
         Look in the cache for the latest revision, if not present ask the SCM.
         """
         if not hasattr(self, "_latest_revision"):
-            raise ParameterError("The fetcher for this URL does not support _latest_revision", url)
+            raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url)
 
         revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
         key = self.generate_revision_key(ud, d, name)
@@ -1638,7 +1635,7 @@ class Fetch(object):
             urls = self.urls
 
         network = self.d.getVar("BB_NO_NETWORK")
-        premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY") == "1")
+        premirroronly = bb.utils.to_boolean(self.d.getVar("BB_FETCH_PREMIRRORONLY"))
 
         for u in urls:
             ud = self.ud[u]
@@ -1716,7 +1713,7 @@ class Fetch(object):
                 update_stamp(ud, self.d)
 
             except IOError as e:
-                if e.errno in [os.errno.ESTALE]:
+                if e.errno in [errno.ESTALE]:
                     logger.error("Stale Error Observed %s." % u)
                     raise ChecksumError("Stale Error Detected")
 
@@ -1786,7 +1783,7 @@ class Fetch(object):
 
         for url in urls:
             if url not in self.ud:
-                self.ud[url] = FetchData(url, d)
+                self.ud[url] = FetchData(url, self.d)
             ud = self.ud[url]
             ud.setup_localpath(self.d)
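
Several hunks above replace literal string comparisons such as d.getVar("BB_NO_NETWORK") == "1" with bb.utils.to_boolean(), so BB_NO_NETWORK and BB_FETCH_PREMIRRORONLY now accept the usual boolean spellings instead of only "1". A minimal sketch of the accepted values, written as a standalone helper rather than the exact bb.utils implementation:

    def to_boolean(string, default=False):
        # Unset/empty falls back to the default; common truthy and falsy
        # spellings are accepted, anything else is rejected loudly.
        if not string:
            return default
        value = string.lower()
        if value in ("y", "yes", "1", "true"):
            return True
        if value in ("n", "no", "0", "false"):
            return False
        raise ValueError("Invalid boolean value '%s'" % string)

    # e.g. BB_NO_NETWORK = "yes" now disables network access, whereas the
    # old check only honoured the exact string "1".
    assert to_boolean("yes") and to_boolean("1") and not to_boolean("")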
 

+ 1 - 11
bitbake/lib/bb/fetch2/bzr.py

@@ -10,18 +10,8 @@ BitBake 'Fetch' implementation for bzr.
 #   BitBake build tools.
 #   Copyright (C) 2003, 2004  Chris Larson
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 import os
 import sys

+ 4 - 15
bitbake/lib/bb/fetch2/clearcase.py

@@ -1,5 +1,3 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 """
 BitBake 'Fetch' clearcase implementation
 
@@ -47,18 +45,7 @@ User credentials:
 """
 # Copyright (C) 2014 Siemens AG
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# SPDX-License-Identifier: GPL-2.0-only
 #
 
 import os
@@ -67,6 +54,8 @@ import shutil
 import bb
 from   bb.fetch2 import FetchMethod
 from   bb.fetch2 import FetchError
+from   bb.fetch2 import MissingParameterError
+from   bb.fetch2 import ParameterError
 from   bb.fetch2 import runfetchcmd
 from   bb.fetch2 import logger
 
@@ -92,7 +81,7 @@ class ClearCase(FetchMethod):
         if 'protocol' in ud.parm:
             ud.proto = ud.parm['protocol']
         if not ud.proto in ('http', 'https'):
-            raise fetch2.ParameterError("Invalid protocol type", ud.url)
+            raise ParameterError("Invalid protocol type", ud.url)
 
         ud.vob = ''
         if 'vob' in ud.parm:
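
Besides the SPDX header swap, the clearcase change imports ParameterError (and MissingParameterError) directly: the old code raised fetch2.ParameterError even though no name 'fetch2' was in scope, so an invalid protocol= parameter would have surfaced as a NameError rather than the intended message. The corrected pattern, as a rough standalone sketch assuming bb.fetch2 is importable:

    from bb.fetch2 import ParameterError

    def check_proto(proto, url):
        # Raise the fetcher's own exception so the failing URL and the
        # offending protocol= value are reported to the user.
        if proto not in ("http", "https"):
            raise ParameterError("Invalid protocol type", url)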

+ 2 - 15
bitbake/lib/bb/fetch2/cvs.py

@@ -1,5 +1,3 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 """
 BitBake 'Fetch' implementations
 
@@ -10,20 +8,9 @@ BitBake build tools.
 
 # Copyright (C) 2003, 2004  Chris Larson
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-#Based on functions from the base bb module, Copyright 2003 Holger Schurig
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
 #
 
 import os

+ 47 - 21
bitbake/lib/bb/fetch2/git.py

@@ -1,5 +1,3 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 """
 BitBake 'Fetch' git implementation
 
@@ -55,20 +53,10 @@ Supported SRC_URI options are:
 
 """
 
-#Copyright (C) 2005 Richard Purdie
+# Copyright (C) 2005 Richard Purdie
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 import collections
 import errno
@@ -199,7 +187,7 @@ class Git(FetchMethod):
             depth_default = 1
         ud.shallow_depths = collections.defaultdict(lambda: depth_default)
 
-        revs_default = d.getVar("BB_GIT_SHALLOW_REVS", True)
+        revs_default = d.getVar("BB_GIT_SHALLOW_REVS")
         ud.shallow_revs = []
         ud.branches = {}
         for pos, name in enumerate(ud.names):
@@ -318,7 +306,7 @@ class Git(FetchMethod):
     def try_premirror(self, ud, d):
         # If we don't do this, updating an existing checkout with only premirrors
         # is not possible
-        if d.getVar("BB_FETCH_PREMIRRORONLY") is not None:
+        if bb.utils.to_boolean(d.getVar("BB_FETCH_PREMIRRORONLY")):
             return True
         if os.path.exists(ud.clonedir):
             return False
@@ -476,6 +464,8 @@ class Git(FetchMethod):
         if os.path.exists(destdir):
             bb.utils.prunedir(destdir)
 
+        need_lfs = ud.parm.get("lfs", "1") == "1"
+
         source_found = False
         source_error = []
 
@@ -503,6 +493,13 @@ class Git(FetchMethod):
 
         repourl = self._get_repo_url(ud)
         runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, repourl), d, workdir=destdir)
+
+        if self._contains_lfs(ud, d, destdir):
+            if need_lfs and not self._find_git_lfs(d):
+                raise bb.fetch2.FetchError("Repository %s has LFS content, install git-lfs on host to download (or set lfs=0 to ignore it)" % (repourl))
+            else:
+                bb.note("Repository %s has LFS content but it is not being fetched" % (repourl))
+
         if not ud.nocheckout:
             if subdir != "":
                 runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d,
@@ -522,9 +519,17 @@ class Git(FetchMethod):
     def clean(self, ud, d):
         """ clean the git directory """
 
-        bb.utils.remove(ud.localpath, True)
-        bb.utils.remove(ud.fullmirror)
-        bb.utils.remove(ud.fullmirror + ".done")
+        to_remove = [ud.localpath, ud.fullmirror, ud.fullmirror + ".done"]
+        # The localpath is a symlink to clonedir when it is cloned from a
+        # mirror, so remove both of them.
+        if os.path.islink(ud.localpath):
+            clonedir = os.path.realpath(ud.localpath)
+            to_remove.append(clonedir)
+
+        for r in to_remove:
+            if os.path.exists(r):
+                bb.note('Removing %s' % r)
+                bb.utils.remove(r, True)
 
     def supports_srcrev(self):
         return True
@@ -545,6 +550,27 @@ class Git(FetchMethod):
             raise bb.fetch2.FetchError("The command '%s' gave output with more then 1 line unexpectedly, output: '%s'" % (cmd, output))
         return output.split()[0] != "0"
 
+    def _contains_lfs(self, ud, d, wd):
+        """
+        Check if the repository has 'lfs' (large file) content
+        """
+        cmd = "%s grep lfs HEAD:.gitattributes | wc -l" % (
+                ud.basecmd)
+        try:
+            output = runfetchcmd(cmd, d, quiet=True, workdir=wd)
+            if int(output) > 0:
+                return True
+        except (bb.fetch2.FetchError,ValueError):
+            pass
+        return False
+
+    def _find_git_lfs(self, d):
+        """
+        Return True if git-lfs can be found, False otherwise.
+        """
+        import shutil
+        return shutil.which("git-lfs", path=d.getVar('PATH')) is not None
+
     def _get_repo_url(self, ud):
         """
         Return the repository URL
@@ -615,7 +641,7 @@ class Git(FetchMethod):
         """
         pupver = ('', '')
 
-        tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or "(?P<pver>([0-9][\.|_]?)+)")
+        tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or r"(?P<pver>([0-9][\.|_]?)+)")
         try:
             output = self._lsremote(ud, d, "refs/tags/*")
         except (bb.fetch2.FetchError, bb.fetch2.NetworkAccess) as e:
@@ -630,7 +656,7 @@ class Git(FetchMethod):
 
             tag_head = line.split("/")[-1]
             # Ignore non-released branches
-            m = re.search("(alpha|beta|rc|final)+", tag_head)
+            m = re.search(r"(alpha|beta|rc|final)+", tag_head)
             if m:
                 continue
 

+ 1 - 13
bitbake/lib/bb/fetch2/gitannex.py

@@ -1,5 +1,3 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 """
 BitBake 'Fetch' git annex implementation
 """
@@ -7,18 +5,8 @@ BitBake 'Fetch' git annex implementation
 # Copyright (C) 2014 Otavio Salvador
 # Copyright (C) 2014 O.S. Systems Software LTDA.
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 import os
 import bb

+ 123 - 158
bitbake/lib/bb/fetch2/gitsm.py

@@ -1,5 +1,3 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 """
 BitBake 'Fetch' git submodules implementation
 
@@ -16,18 +14,8 @@ NOTE: Switching a SRC_URI from "git://" to "gitsm://" requires a clean of your r
 
 # Copyright (C) 2013 Richard Purdie
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 import os
 import bb
@@ -45,60 +33,97 @@ class GitSM(Git):
         """
         return ud.type in ['gitsm']
 
-    @staticmethod
-    def parse_gitmodules(gitmodules):
-        modules = {}
-        module = ""
-        for line in gitmodules.splitlines():
-            if line.startswith('[submodule'):
-                module = line.split('"')[1]
-                modules[module] = {}
-            elif module and line.strip().startswith('path'):
-                path = line.split('=')[1].strip()
-                modules[module]['path'] = path
-            elif module and line.strip().startswith('url'):
-                url = line.split('=')[1].strip()
-                modules[module]['url'] = url
-        return modules
-
-    def update_submodules(self, ud, d):
+    def process_submodules(self, ud, workdir, function, d):
+        """
+        Iterate over all of the submodules in this repository and execute
+        the 'function' for each of them.
+        """
+
         submodules = []
         paths = {}
+        revision = {}
         uris = {}
-        local_paths = {}
-
+        subrevision = {}
+
+        def parse_gitmodules(gitmodules):
+            modules = {}
+            module = ""
+            for line in gitmodules.splitlines():
+                if line.startswith('[submodule'):
+                    module = line.split('"')[1]
+                    modules[module] = {}
+                elif module and line.strip().startswith('path'):
+                    path = line.split('=')[1].strip()
+                    modules[module]['path'] = path
+                elif module and line.strip().startswith('url'):
+                    url = line.split('=')[1].strip()
+                    modules[module]['url'] = url
+            return modules
+
+        # Collect the defined submodules, and their attributes
         for name in ud.names:
             try:
-                gitmodules = runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True, workdir=ud.clonedir)
+                gitmodules = runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True, workdir=workdir)
             except:
                 # No submodules to update
                 continue
 
-            for m, md in self.parse_gitmodules(gitmodules).items():
+            for m, md in parse_gitmodules(gitmodules).items():
+                try:
+                    module_hash = runfetchcmd("%s ls-tree -z -d %s %s" % (ud.basecmd, ud.revisions[name], md['path']), d, quiet=True, workdir=workdir)
+                except:
+                    # If the command fails, we don't have a valid file to check.  If it doesn't
+                    # fail -- it still might be a failure, see next check...
+                    module_hash = ""
+
+                if not module_hash:
+                    logger.debug(1, "submodule %s is defined, but is not initialized in the repository. Skipping", m)
+                    continue
+
                 submodules.append(m)
                 paths[m] = md['path']
+                revision[m] = ud.revisions[name]
                 uris[m] = md['url']
+                subrevision[m] = module_hash.split()[2]
+
+                # Convert relative to absolute uri based on parent uri
                 if uris[m].startswith('..'):
                     newud = copy.copy(ud)
-                    newud.path = os.path.realpath(os.path.join(newud.path, md['url']))
+                    newud.path = os.path.realpath(os.path.join(newud.path, uris[m]))
                     uris[m] = Git._get_repo_url(self, newud)
 
         for module in submodules:
-            module_hash = runfetchcmd("%s ls-tree -z -d %s %s" % (ud.basecmd, ud.revisions[name], paths[module]), d, quiet=True, workdir=ud.clonedir)
-            module_hash = module_hash.split()[2]
+            # Translate the module url into a SRC_URI
+
+            if "://" in uris[module]:
+                # Properly formatted URL already
+                proto = uris[module].split(':', 1)[0]
+                url = uris[module].replace('%s:' % proto, 'gitsm:', 1)
+            else:
+                if ":" in uris[module]:
+                    # Most likely an SSH style reference
+                    proto = "ssh"
+                    if ":/" in uris[module]:
+                        # Absolute reference, easy to convert..
+                        url = "gitsm://" + uris[module].replace(':/', '/', 1)
+                    else:
+                        # Relative reference, no way to know if this is right!
+                        logger.warning("Submodule included by %s refers to relative ssh reference %s.  References may fail if not absolute." % (ud.url, uris[module]))
+                        url = "gitsm://" + uris[module].replace(':', '/', 1)
+                else:
+                    # This has to be a file reference
+                    proto = "file"
+                    url = "gitsm://" + uris[module]
 
-            # Build new SRC_URI
-            proto = uris[module].split(':', 1)[0]
-            url = uris[module].replace('%s:' % proto, 'gitsm:', 1)
             url += ';protocol=%s' % proto
             url += ";name=%s" % module
-            url += ";bareclone=1;nocheckout=1;nobranch=1"
+            url += ";subpath=%s" % module
 
             ld = d.createCopy()
             # Not necessary to set SRC_URI, since we're passing the URI to
             # Fetch.
             #ld.setVar('SRC_URI', url)
-            ld.setVar('SRCREV_%s' % module, module_hash)
+            ld.setVar('SRCREV_%s' % module, subrevision[module])
 
             # Workaround for issues with SRCPV/SRCREV_FORMAT errors
             # error refer to 'multiple' repositories.  Only the repository
@@ -106,145 +131,85 @@ class GitSM(Git):
             ld.setVar('SRCPV', d.getVar('SRCPV'))
             ld.setVar('SRCREV_FORMAT', module)
 
-            newfetch = Fetch([url], ld, cache=False)
-            newfetch.download()
-            local_paths[module] = newfetch.localpath(url)
+            function(ud, url, module, paths[module], ld)
 
-            # Correct the submodule references to the local download version...
-            runfetchcmd("%(basecmd)s config submodule.%(module)s.url %(url)s" % {'basecmd': ud.basecmd, 'module': module, 'url' : local_paths[module]}, d, workdir=ud.clonedir)
-
-            symlink_path = os.path.join(ud.clonedir, 'modules', paths[module])
-            if not os.path.exists(symlink_path):
-                try:
-                    os.makedirs(os.path.dirname(symlink_path), exist_ok=True)
-                except OSError:
-                    pass
-                os.symlink(local_paths[module], symlink_path)
-
-        return True
+        return submodules != []
 
     def need_update(self, ud, d):
-        main_repo_needs_update = Git.need_update(self, ud, d)
-
-        # First check that the main repository has enough history fetched. If it doesn't, then we don't
-        # even have the .gitmodules and gitlinks for the submodules to attempt asking whether the
-        # submodules' histories are recent enough.
-        if main_repo_needs_update:
+        if Git.need_update(self, ud, d):
             return True
 
-        # Now check that the submodule histories are new enough. The git-submodule command doesn't have
-        # any clean interface for doing this aside from just attempting the checkout (with network
-        # fetched disabled).
-        return not self.update_submodules(ud, d)
+        try:
+            # Check for the nugget dropped by the download operation
+            known_srcrevs = runfetchcmd("%s config --get-all bitbake.srcrev" % \
+                            (ud.basecmd), d, workdir=ud.clonedir)
 
-    def download(self, ud, d):
-        Git.download(self, ud, d)
+            if ud.revisions[ud.names[0]] not in known_srcrevs.split():
+                return True
+        except bb.fetch2.FetchError:
+            # No srcrev nuggets, so this is new and needs to be updated
+            return True
 
-        if not ud.shallow or ud.localpath != ud.fullshallow:
-            self.update_submodules(ud, d)
+        return False
 
-    def copy_submodules(self, submodules, ud, destdir, d):
-        if ud.bareclone:
-            repo_conf = destdir
-        else:
-            repo_conf = os.path.join(destdir, '.git')
+    def download(self, ud, d):
+        def download_submodule(ud, url, module, modpath, d):
+            url += ";bareclone=1;nobranch=1"
 
-        if submodules and not os.path.exists(os.path.join(repo_conf, 'modules')):
-            os.mkdir(os.path.join(repo_conf, 'modules'))
+            # Is the following still needed?
+            #url += ";nocheckout=1"
 
-        for module, md in submodules.items():
-            srcpath = os.path.join(ud.clonedir, 'modules', md['path'])
-            modpath = os.path.join(repo_conf, 'modules', md['path'])
+            try:
+                newfetch = Fetch([url], d, cache=False)
+                newfetch.download()
+                # Drop a nugget to add each of the srcrevs we've fetched (used by need_update)
+                runfetchcmd("%s config --add bitbake.srcrev %s" % \
+                            (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir)
+            except Exception as e:
+                logger.error('gitsm: submodule download failed: %s %s' % (type(e).__name__, str(e)))
+                raise
 
-            if os.path.exists(srcpath):
-                if os.path.exists(os.path.join(srcpath, '.git')):
-                    srcpath = os.path.join(srcpath, '.git')
+        Git.download(self, ud, d)
+        self.process_submodules(ud, ud.clonedir, download_submodule, d)
 
-                target = modpath
-                if os.path.exists(modpath):
-                    target = os.path.dirname(modpath)
+    def unpack(self, ud, destdir, d):
+        def unpack_submodules(ud, url, module, modpath, d):
+            url += ";bareclone=1;nobranch=1"
 
-                os.makedirs(os.path.dirname(target), exist_ok=True)
-                runfetchcmd("cp -fpLR %s %s" % (srcpath, target), d)
-            elif os.path.exists(modpath):
-                # Module already exists, likely unpacked from a shallow mirror clone
-                pass
+            # Figure out where we clone over the bare submodules...
+            if ud.bareclone:
+                repo_conf = ud.destdir
             else:
-                # This is fatal, as we do NOT want git-submodule to hit the network
-                raise bb.fetch2.FetchError('Submodule %s does not exist in %s or %s.' % (module, srcpath, modpath))
-
-    def clone_shallow_local(self, ud, dest, d):
-        super(GitSM, self).clone_shallow_local(ud, dest, d)
+                repo_conf = os.path.join(ud.destdir, '.git')
 
-        # Copy over the submodules' fetched histories too.
-        repo_conf = os.path.join(dest, '.git')
-
-        submodules = []
-        for name in ud.names:
             try:
-                gitmodules = runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revision), d, quiet=True, workdir=dest)
-            except:
-                # No submodules to update
-                continue
+                newfetch = Fetch([url], d, cache=False)
+                newfetch.unpack(root=os.path.dirname(os.path.join(repo_conf, 'modules', module)))
+            except Exception as e:
+                logger.error('gitsm: submodule unpack failed: %s %s' % (type(e).__name__, str(e)))
+                raise
 
-            submodules = self.parse_gitmodules(gitmodules)
-            self.copy_submodules(submodules, ud, dest, d)
+            local_path = newfetch.localpath(url)
 
-    def unpack(self, ud, destdir, d):
-        Git.unpack(self, ud, destdir, d)
+            # Correct the submodule references to the local download version...
+            runfetchcmd("%(basecmd)s config submodule.%(module)s.url %(url)s" % {'basecmd': ud.basecmd, 'module': module, 'url' : local_path}, d, workdir=ud.destdir)
 
-        # Copy over the submodules' fetched histories too.
-        if ud.bareclone:
-            repo_conf = ud.destdir
-        else:
-            repo_conf = os.path.join(ud.destdir, '.git')
+            if ud.shallow:
+                runfetchcmd("%(basecmd)s config submodule.%(module)s.shallow true" % {'basecmd': ud.basecmd, 'module': module}, d, workdir=ud.destdir)
 
-        update_submodules = False
-        paths = {}
-        uris = {}
-        local_paths = {}
-        for name in ud.names:
+            # Ensure the submodule repository is NOT set to bare, since we're checking it out...
             try:
-                gitmodules = runfetchcmd("%s show HEAD:.gitmodules" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)
+                runfetchcmd("%s config core.bare false" % (ud.basecmd), d, quiet=True, workdir=os.path.join(repo_conf, 'modules', module))
             except:
-                # No submodules to update
-                continue
-
-            submodules = self.parse_gitmodules(gitmodules)
-            self.copy_submodules(submodules, ud, ud.destdir, d)
-
-            submodules_queue = [(module, os.path.join(repo_conf, 'modules', md['path'])) for module, md in submodules.items()]
-            while len(submodules_queue) != 0:
-                module, modpath = submodules_queue.pop()
-
-                # add submodule children recursively
-                try:
-                    gitmodules = runfetchcmd("%s show HEAD:.gitmodules" % (ud.basecmd), d, quiet=True, workdir=modpath)
-                    for m, md in self.parse_gitmodules(gitmodules).items():
-                        submodules_queue.append([m, os.path.join(modpath, 'modules', md['path'])])
-                except:
-                    # no children
-                    pass
-
+                logger.error("Unable to set git config core.bare to false for %s" % os.path.join(repo_conf, 'modules', module))
+                raise
 
-                # There are submodules to update
-                update_submodules = True
-
-                # Determine (from the submodule) the correct url to reference
-                try:
-                    output = runfetchcmd("%(basecmd)s config remote.origin.url" % {'basecmd': ud.basecmd}, d, workdir=modpath)
-                except bb.fetch2.FetchError as e:
-                    # No remote url defined in this submodule
-                    continue
-
-                local_paths[module] = output
-
-                # Setup the local URL properly (like git submodule init or sync would do...)
-                runfetchcmd("%(basecmd)s config submodule.%(module)s.url %(url)s" % {'basecmd': ud.basecmd, 'module': module, 'url' : local_paths[module]}, d, workdir=ud.destdir)
+        Git.unpack(self, ud, destdir, d)
 
-                # Ensure the submodule repository is NOT set to bare, since we're checking it out...
-                runfetchcmd("%s config core.bare false" % (ud.basecmd), d, quiet=True, workdir=modpath)
+        ret = self.process_submodules(ud, ud.destdir, unpack_submodules, d)
 
-        if update_submodules:
-            # Run submodule update, this sets up the directories -- without touching the config
+        if not ud.bareclone and ret:
+            # All submodules should already be downloaded and configured in the tree.  This simply sets
+            # up the configuration and checks out the files.  The main project config should remain
+            # unmodified, and no download from the internet should occur.
             runfetchcmd("%s submodule update --recursive --no-fetch" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)
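
The gitsm rewrite replaces the copy-and-symlink submodule handling with a single process_submodules() walker: it parses .gitmodules at the pinned revision, skips submodules that are defined but not initialized, and invokes a caller-supplied function for each one, so download() and unpack() differ only in the callback they pass. A condensed sketch of that shape (the module data and callbacks below are placeholders, not the fetcher's real signatures):

    def process_submodules(modules, function):
        # 'modules' stands in for the parsed .gitmodules data:
        # submodule name -> (url, path, pinned revision).
        for name, (url, path, revision) in modules.items():
            function(name, url, path, revision)
        return bool(modules)

    def download_submodule(name, url, path, revision):
        print("fetching %s from %s at %s" % (name, url, revision))

    def unpack_submodule(name, url, path, revision):
        print("checking out %s into %s" % (name, path))

    # download() and unpack() reuse the same walker with different callbacks.
    modules = {"libfoo": ("gitsm://git.example.invalid/libfoo;protocol=https",
                          "ext/libfoo", "0123abcd")}
    process_submodules(modules, download_submodule)
    process_submodules(modules, unpack_submodule)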

+ 3 - 15
bitbake/lib/bb/fetch2/hg.py

@@ -1,5 +1,3 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 """
 BitBake 'Fetch' implementation for mercurial DRCS (hg).
 
@@ -9,20 +7,10 @@ BitBake 'Fetch' implementation for mercurial DRCS (hg).
 # Copyright (C) 2004        Marcin Juszkiewicz
 # Copyright (C) 2007        Robert Schuster
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# SPDX-License-Identifier: GPL-2.0-only
 #
 # Based on functions from the base bb module, Copyright 2003 Holger Schurig
+#
 
 import os
 import sys
@@ -99,7 +87,7 @@ class Hg(FetchMethod):
     def try_premirror(self, ud, d):
         # If we don't do this, updating an existing checkout with only premirrors
         # is not possible
-        if d.getVar("BB_FETCH_PREMIRRORONLY") is not None:
+        if bb.utils.to_boolean(d.getVar("BB_FETCH_PREMIRRORONLY")):
             return True
         if os.path.exists(ud.moddir):
             return False

+ 2 - 14
bitbake/lib/bb/fetch2/local.py

@@ -1,5 +1,3 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 """
 BitBake 'Fetch' implementations
 
@@ -10,20 +8,10 @@ BitBake build tools.
 
 # Copyright (C) 2003, 2004  Chris Larson
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# SPDX-License-Identifier: GPL-2.0-only
 #
 # Based on functions from the base bb module, Copyright 2003 Holger Schurig
+#
 
 import os
 import urllib.request, urllib.parse, urllib.error

+ 31 - 38
bitbake/lib/bb/fetch2/npm.py

@@ -1,5 +1,6 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
 """
 BitBake 'Fetch' NPM implementation
 
@@ -100,11 +101,19 @@ class Npm(FetchMethod):
             return False
         return True
 
-    def _runwget(self, ud, d, command, quiet):
-        logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
-        bb.fetch2.check_network_access(d, command, ud.url)
+    def _runpack(self, ud, d, pkgfullname: str, quiet=False) -> str:
+        """
+        Runs npm pack on a full package name.
+        Returns the filename of the downloaded package
+        """
+        bb.fetch2.check_network_access(d, pkgfullname, ud.registry)
         dldir = d.getVar("DL_DIR")
-        runfetchcmd(command, d, quiet, workdir=dldir)
+        dldir = os.path.join(dldir, ud.prefixdir)
+
+        command = "npm pack {} --registry {}".format(pkgfullname, ud.registry)
+        logger.debug(2, "Fetching {} using command '{}' in {}".format(pkgfullname, command, dldir))
+        filename = runfetchcmd(command, d, quiet, workdir=dldir)
+        return filename.rstrip()
 
     def _unpackdep(self, ud, pkg, data, destdir, dldir, d):
         file = data[pkg]['tgz']
@@ -150,20 +159,11 @@ class Npm(FetchMethod):
         Parse the output of npm view --json; the last JSON result
         is assumed to be the one that we're interested in.
         '''
-        pdata = None
-        outdeps = {}
-        datalines = []
-        bracelevel = 0
-        for line in output.splitlines():
-            if bracelevel:
-                datalines.append(line)
-            elif '{' in line:
-                datalines = []
-                datalines.append(line)
-            bracelevel = bracelevel + line.count('{') - line.count('}')
-        if datalines:
-            pdata = json.loads('\n'.join(datalines))
-        return pdata
+        pdata = json.loads(output);
+        try:
+            return pdata[-1]
+        except:
+            return pdata
 
     def _getdependencies(self, pkg, data, version, d, ud, optional=False, fetchedlist=None):
         if fetchedlist is None:
@@ -171,6 +171,9 @@ class Npm(FetchMethod):
         pkgfullname = pkg
         if version != '*' and not '/' in version:
             pkgfullname += "@'%s'" % version
+        if pkgfullname in fetchedlist:
+            return
+
         logger.debug(2, "Calling getdeps on %s" % pkg)
         fetchcmd = "npm view %s --json --registry %s" % (pkgfullname, ud.registry)
         output = runfetchcmd(fetchcmd, d, True)
@@ -190,15 +193,10 @@ class Npm(FetchMethod):
                 if (not blacklist and 'linux' not in pkg_os) or '!linux' in pkg_os:
                     logger.debug(2, "Skipping %s since it's incompatible with Linux" % pkg)
                     return
-        #logger.debug(2, "Output URL is %s - %s - %s" % (ud.basepath, ud.basename, ud.localfile))
-        outputurl = pdata['dist']['tarball']
+        filename = self._runpack(ud, d, pkgfullname)
         data[pkg] = {}
-        data[pkg]['tgz'] = os.path.basename(outputurl)
-        if outputurl in fetchedlist:
-            return
-
-        self._runwget(ud, d, "%s --directory-prefix=%s %s" % (self.basecmd, ud.prefixdir, outputurl), False)
-        fetchedlist.append(outputurl)
+        data[pkg]['tgz'] = filename
+        fetchedlist.append(pkgfullname)
 
         dependencies = pdata.get('dependencies', {})
         optionalDependencies = pdata.get('optionalDependencies', {})
@@ -225,17 +223,12 @@ class Npm(FetchMethod):
                     if obj == pkg:
                         self._getshrinkeddependencies(obj, data['dependencies'][obj], data['dependencies'][obj]['version'], d, ud, lockdown, manifest, False)
                         return
-        outputurl = "invalid"
-        if ('resolved' not in data) or (not data['resolved'].startswith('http://') and not data['resolved'].startswith('https://')):
-            # will be the case for ${PN}
-            fetchcmd = "npm view %s@%s dist.tarball --registry %s" % (pkg, version, ud.registry)
-            logger.debug(2, "Found this matching URL: %s" % str(fetchcmd))
-            outputurl = runfetchcmd(fetchcmd, d, True)
-        else:
-            outputurl = data['resolved']
-        self._runwget(ud, d, "%s --directory-prefix=%s %s" % (self.basecmd, ud.prefixdir, outputurl), False)
+
+        pkgnameWithVersion = "{}@{}".format(pkg, version)
+        logger.debug(2, "Get dependencies for {}".format(pkgnameWithVersion))
+        filename = self._runpack(ud, d, pkgnameWithVersion)
         manifest[pkg] = {}
-        manifest[pkg]['tgz'] = os.path.basename(outputurl).rstrip()
+        manifest[pkg]['tgz'] = filename
         manifest[pkg]['deps'] = {}
 
         if pkg in lockdown:
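
The npm fetcher now lets npm itself download tarballs: _runpack() runs "npm pack <name>@<version> --registry <url>" in DL_DIR, and npm pack both fetches the tarball and prints the resulting filename on stdout, replacing the old wget of dist.tarball URLs. A hedged sketch of that call outside the fetcher (the package name below is a placeholder):

    import subprocess

    def npm_pack(pkg_with_version, registry, workdir):
        # npm pack downloads the package tarball into 'workdir' and prints
        # the generated filename (e.g. somepkg-1.0.0.tgz) on stdout.
        out = subprocess.check_output(
            ["npm", "pack", pkg_with_version, "--registry", registry],
            cwd=workdir)
        return out.decode().strip()

    # e.g. npm_pack("somepkg@1.0.0", "https://registry.npmjs.org", "/tmp")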

+ 3 - 2
bitbake/lib/bb/fetch2/osc.py

@@ -1,5 +1,6 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
 """
 Bitbake "Fetch" implementation for osc (Opensuse build service client).
 Based on the svn "Fetch" implementation.

+ 1 - 14
bitbake/lib/bb/fetch2/perforce.py

@@ -1,5 +1,3 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 """
 BitBake 'Fetch' implementation for perforce
 
@@ -8,18 +6,7 @@ BitBake 'Fetch' implementation for perforce
 # Copyright (C) 2003, 2004  Chris Larson
 # Copyright (C) 2016 Kodak Alaris, Inc.
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# SPDX-License-Identifier: GPL-2.0-only
 #
 # Based on functions from the base bb module, Copyright 2003 Holger Schurig
 

+ 2 - 14
bitbake/lib/bb/fetch2/repo.py

@@ -1,5 +1,3 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 """
 BitBake "Fetch" repo (git) implementation
 
@@ -8,20 +6,10 @@ BitBake "Fetch" repo (git) implementation
 # Copyright (C) 2009 Tom Rini <trini@embeddedalley.com>
 #
 # Based on git.py which is:
-#Copyright (C) 2005 Richard Purdie
+# Copyright (C) 2005 Richard Purdie
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 import os
 import bb

+ 1 - 14
bitbake/lib/bb/fetch2/s3.py

@@ -1,5 +1,3 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 """
 BitBake 'Fetch' implementation for Amazon AWS S3.
 
@@ -13,18 +11,7 @@ The aws tool must be correctly installed and configured prior to use.
 # Based in part on bb.fetch2.wget:
 #    Copyright (C) 2003, 2004  Chris Larson
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# SPDX-License-Identifier: GPL-2.0-only
 #
 # Based on functions from the base bb module, Copyright 2003 Holger Schurig
 

+ 1 - 14
bitbake/lib/bb/fetch2/sftp.py

@@ -1,5 +1,3 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 """
 BitBake SFTP Fetch implementation
 
@@ -44,18 +42,7 @@ SRC_URI = "sftp://user@host.example.com/dir/path.file.txt"
 # Based in part on bb.fetch2.wget:
 #    Copyright (C) 2003, 2004  Chris Larson
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# SPDX-License-Identifier: GPL-2.0-only
 #
 # Based on functions from the base bb module, Copyright 2003 Holger Schurig
 

+ 1 - 13
bitbake/lib/bb/fetch2/ssh.py

@@ -1,5 +1,3 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 '''
 BitBake 'Fetch' implementations
 
@@ -29,18 +27,8 @@ IETF secsh internet draft:
 #            Copyright 2003 Holger Schurig
 #
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 import re, os
 from   bb.fetch2 import FetchMethod

+ 56 - 42
bitbake/lib/bb/fetch2/svn.py

@@ -1,5 +1,3 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 """
 BitBake 'Fetch' implementation for svn.
 
@@ -8,18 +6,7 @@ BitBake 'Fetch' implementation for svn.
 # Copyright (C) 2003, 2004  Chris Larson
 # Copyright (C) 2004        Marcin Juszkiewicz
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# SPDX-License-Identifier: GPL-2.0-only
 #
 # Based on functions from the base bb module, Copyright 2003 Holger Schurig
 
@@ -63,6 +50,9 @@ class Svn(FetchMethod):
         relpath = self._strip_leading_slashes(ud.path)
         ud.pkgdir = os.path.join(svndir, ud.host, relpath)
         ud.moddir = os.path.join(ud.pkgdir, ud.module)
+        # Protects the repository from concurrent updates, e.g. from two
+        # recipes fetching different revisions at the same time
+        ud.svnlock = os.path.join(ud.pkgdir, "svn.lock")
 
         ud.setup_revisions(d)
 
@@ -101,6 +91,13 @@ class Svn(FetchMethod):
             svncmd = "%s log --limit 1 %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module)
         else:
             suffix = ""
+
+            # externals may be either 'allowed' or 'nowarn', but not both.  Allowed
+            # will not issue a warning, but will log to the debug buffer what has likely
+            # been downloaded by SVN.
+            if not ("externals" in ud.parm and ud.parm["externals"] == "allowed"):
+                options.append("--ignore-externals")
+
             if ud.revision:
                 options.append("-r %s" % ud.revision)
                 suffix = "@%s" % (ud.revision)
@@ -123,35 +120,52 @@ class Svn(FetchMethod):
 
         logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
 
-        if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
-            svnupdatecmd = self._buildsvncommand(ud, d, "update")
-            logger.info("Update " + ud.url)
-            # We need to attempt to run svn upgrade first in case its an older working format
-            try:
-                runfetchcmd(ud.basecmd + " upgrade", d, workdir=ud.moddir)
-            except FetchError:
-                pass
-            logger.debug(1, "Running %s", svnupdatecmd)
-            bb.fetch2.check_network_access(d, svnupdatecmd, ud.url)
-            runfetchcmd(svnupdatecmd, d, workdir=ud.moddir)
-        else:
-            svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
-            logger.info("Fetch " + ud.url)
-            # check out sources there
-            bb.utils.mkdirhier(ud.pkgdir)
-            logger.debug(1, "Running %s", svnfetchcmd)
-            bb.fetch2.check_network_access(d, svnfetchcmd, ud.url)
-            runfetchcmd(svnfetchcmd, d, workdir=ud.pkgdir)
-
-        scmdata = ud.parm.get("scmdata", "")
-        if scmdata == "keep":
-            tar_flags = ""
-        else:
-            tar_flags = "--exclude='.svn'"
+        lf = bb.utils.lockfile(ud.svnlock)
+
+        try:
+            if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
+                svnupdatecmd = self._buildsvncommand(ud, d, "update")
+                logger.info("Update " + ud.url)
+                # We need to attempt to run svn upgrade first in case it's an older working format
+                try:
+                    runfetchcmd(ud.basecmd + " upgrade", d, workdir=ud.moddir)
+                except FetchError:
+                    pass
+                logger.debug(1, "Running %s", svnupdatecmd)
+                bb.fetch2.check_network_access(d, svnupdatecmd, ud.url)
+                runfetchcmd(svnupdatecmd, d, workdir=ud.moddir)
+            else:
+                svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
+                logger.info("Fetch " + ud.url)
+                # check out sources there
+                bb.utils.mkdirhier(ud.pkgdir)
+                logger.debug(1, "Running %s", svnfetchcmd)
+                bb.fetch2.check_network_access(d, svnfetchcmd, ud.url)
+                runfetchcmd(svnfetchcmd, d, workdir=ud.pkgdir)
+
+            if not ("externals" in ud.parm and ud.parm["externals"] == "nowarn"):
+                # Warn the user if this had externals (won't catch them all)
+                output = runfetchcmd("svn propget svn:externals || true", d, workdir=ud.moddir)
+                if output:
+                    if "--ignore-externals" in svnfetchcmd.split():
+                        bb.warn("%s contains svn:externals." % ud.url)
+                        bb.warn("These should be added to the recipe SRC_URI as necessary.")
+                        bb.warn("svn fetch has ignored externals:\n%s" % output)
+                        bb.warn("To disable this warning add ';externals=nowarn' to the url.")
+                    else:
+                        bb.debug(1, "svn repository has externals:\n%s" % output)
+
+            scmdata = ud.parm.get("scmdata", "")
+            if scmdata == "keep":
+                tar_flags = ""
+            else:
+                tar_flags = "--exclude='.svn'"
 
-        # tar them up to a defined filename
-        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.path_spec), d,
-                    cleanup=[ud.localpath], workdir=ud.pkgdir)
+            # tar them up to a defined filename
+            runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.path_spec), d,
+                        cleanup=[ud.localpath], workdir=ud.pkgdir)
+        finally:
+            bb.utils.unlockfile(lf)
 
     def clean(self, ud, d):
         """ Clean SVN specific files and dirs """

+ 43 - 61
bitbake/lib/bb/fetch2/wget.py

@@ -1,5 +1,3 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 """
 BitBake 'Fetch' implementations
 
@@ -10,18 +8,7 @@ BitBake build tools.
 
 # Copyright (C) 2003, 2004  Chris Larson
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# SPDX-License-Identifier: GPL-2.0-only
 #
 # Based on functions from the base bb module, Copyright 2003 Holger Schurig
 
@@ -33,11 +20,14 @@ import logging
 import errno
 import bb
 import bb.progress
+import socket
+import http.client
 import urllib.request, urllib.parse, urllib.error
 from   bb.fetch2 import FetchMethod
 from   bb.fetch2 import FetchError
 from   bb.fetch2 import logger
 from   bb.fetch2 import runfetchcmd
+from   bb.fetch2 import FetchConnectionCache
 from   bb.utils import export_proxies
 from   bs4 import BeautifulSoup
 from   bs4 import SoupStrainer
@@ -132,10 +122,6 @@ class Wget(FetchMethod):
         return True
 
     def checkstatus(self, fetch, ud, d, try_again=True):
-        import urllib.request, urllib.error, urllib.parse, socket, http.client
-        from urllib.response import addinfourl
-        from bb.fetch2 import FetchConnectionCache
-
         class HTTPConnectionCache(http.client.HTTPConnection):
             if fetch.connection_cache:
                 def connect(self):
@@ -168,7 +154,7 @@ class Wget(FetchMethod):
                 """
                 host = req.host
                 if not host:
-                    raise urlllib2.URLError('no host given')
+                    raise urllib.error.URLError('no host given')
 
                 h = http_class(host, timeout=req.timeout) # will parse host:port
                 h.set_debuglevel(self._debuglevel)
@@ -185,7 +171,7 @@ class Wget(FetchMethod):
                 # request.
 
                 # Don't close connection when connection_cache is enabled,
-                if fetch.connection_cache is None: 
+                if fetch.connection_cache is None:
                     headers["Connection"] = "close"
                 else:
                     headers["Connection"] = "Keep-Alive" # Works for HTTP/1.0
@@ -252,7 +238,7 @@ class Wget(FetchMethod):
                         pass
                     closed = False
 
-                resp = addinfourl(fp_dummy(), r.msg, req.get_full_url())
+                resp = urllib.response.addinfourl(fp_dummy(), r.msg, req.get_full_url())
                 resp.code = r.status
                 resp.msg = r.reason
 
@@ -271,17 +257,18 @@ class Wget(FetchMethod):
                 fp.read()
                 fp.close()
 
-                newheaders = dict((k,v) for k,v in list(req.headers.items())
-                                  if k.lower() not in ("content-length", "content-type"))
-                return self.parent.open(urllib.request.Request(req.get_full_url(),
-                                                        headers=newheaders,
-                                                        origin_req_host=req.origin_req_host,
-                                                        unverifiable=True))
+                if req.get_method() != 'GET':
+                    newheaders = dict((k, v) for k, v in list(req.headers.items())
+                                      if k.lower() not in ("content-length", "content-type"))
+                    return self.parent.open(urllib.request.Request(req.get_full_url(),
+                                                            headers=newheaders,
+                                                            origin_req_host=req.origin_req_host,
+                                                            unverifiable=True))
 
-            """
-            Some servers (e.g. GitHub archives, hosted on Amazon S3) return 403
-            Forbidden when they actually mean 405 Method Not Allowed.
-            """
+                raise urllib.request.HTTPError(req, code, msg, headers, None)
+
+            # Some servers (e.g. GitHub archives, hosted on Amazon S3) return 403
+            # Forbidden when they actually mean 405 Method Not Allowed.
             http_error_403 = http_error_405
 
 
@@ -292,15 +279,15 @@ class Wget(FetchMethod):
             """
             def redirect_request(self, req, fp, code, msg, headers, newurl):
                 newreq = urllib.request.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl)
-                newreq.get_method = lambda: req.get_method()
+                newreq.get_method = req.get_method
                 return newreq
         exported_proxies = export_proxies(d)
 
         handlers = [FixedHTTPRedirectHandler, HTTPMethodFallback]
-        if export_proxies:
+        if exported_proxies:
             handlers.append(urllib.request.ProxyHandler())
         handlers.append(CacheHTTPHandler())
-        # XXX: Since Python 2.7.9 ssl cert validation is enabled by default
+        # Since Python 2.7.9 ssl cert validation is enabled by default
         # see PEP-0476, this causes verification errors on some https servers
         # so disable by default.
         import ssl
@@ -319,19 +306,19 @@ class Wget(FetchMethod):
                 '''Adds Basic auth to http request, pass in login:password as string'''
                 import base64
                 encodeuser = base64.b64encode(login_str.encode('utf-8')).decode("utf-8")
-                authheader =  "Basic %s" % encodeuser
+                authheader = "Basic %s" % encodeuser
                 r.add_header("Authorization", authheader)
 
-            if ud.user:
-                add_basic_auth(ud.user, r)
+            if ud.user and ud.pswd:
+                add_basic_auth(ud.user + ':' + ud.pswd, r)
 
             try:
-                import netrc, urllib.parse
+                import netrc
                 n = netrc.netrc()
                 login, unused, password = n.authenticators(urllib.parse.urlparse(uri).hostname)
                 add_basic_auth("%s:%s" % (login, password), r)
             except (TypeError, ImportError, IOError, netrc.NetrcParseError):
-                 pass
+                pass
 
             with opener.open(r) as response:
                 pass
@@ -396,18 +383,14 @@ class Wget(FetchMethod):
         (oldpn, oldpv, oldsuffix) = old
         (newpn, newpv, newsuffix) = new
 
-        """
-        Check for a new suffix type that we have never heard of before
-        """
-        if (newsuffix):
+        # Check for a new suffix type that we have never heard of before
+        if newsuffix:
             m = self.suffix_regex_comp.search(newsuffix)
             if not m:
                 bb.warn("%s has a possible unknown suffix: %s" % (newpn, newsuffix))
                 return False
 
-        """
-        Not our package so ignore it
-        """
+        # Not our package so ignore it
         if oldpn != newpn:
             return False
 
@@ -473,15 +456,14 @@ class Wget(FetchMethod):
 
         return ""
 
-    def _check_latest_version_by_dir(self, dirver, package, package_regex,
-            current_version, ud, d):
+    def _check_latest_version_by_dir(self, dirver, package, package_regex, current_version, ud, d):
         """
-            Scan every directory in order to get upstream version.
+        Scan every directory in order to get upstream version.
         """
         version_dir = ['', '', '']
         version = ['', '', '']
 
-        dirver_regex = re.compile("(?P<pfx>\D*)(?P<ver>(\d+[\.\-_])+(\d+))")
+        dirver_regex = re.compile(r"(?P<pfx>\D*)(?P<ver>(\d+[\.\-_])+(\d+))")
         s = dirver_regex.search(dirver)
         if s:
             version_dir[1] = s.group('ver')
@@ -541,26 +523,26 @@ class Wget(FetchMethod):
                 gst-fluendo-mp3
         """
         # match most patterns which uses "-" as separator to version digits
-        pn_prefix1 = "[a-zA-Z][a-zA-Z0-9]*([-_][a-zA-Z]\w+)*\+?[-_]"
+        pn_prefix1 = r"[a-zA-Z][a-zA-Z0-9]*([-_][a-zA-Z]\w+)*\+?[-_]"
         # a loose pattern such as for unzip552.tar.gz
-        pn_prefix2 = "[a-zA-Z]+"
+        pn_prefix2 = r"[a-zA-Z]+"
         # a loose pattern such as for 80325-quicky-0.4.tar.gz
-        pn_prefix3 = "[0-9]+[-]?[a-zA-Z]+"
+        pn_prefix3 = r"[0-9]+[-]?[a-zA-Z]+"
         # Save the Package Name (pn) Regex for use later
-        pn_regex = "(%s|%s|%s)" % (pn_prefix1, pn_prefix2, pn_prefix3)
+        pn_regex = r"(%s|%s|%s)" % (pn_prefix1, pn_prefix2, pn_prefix3)
 
         # match version
-        pver_regex = "(([A-Z]*\d+[a-zA-Z]*[\.\-_]*)+)"
+        pver_regex = r"(([A-Z]*\d+[a-zA-Z]*[\.\-_]*)+)"
 
         # match arch
         parch_regex = "-source|_all_"
 
         # src.rpm extension was added only for rpm package. Can be removed if the rpm
         # packaged will always be considered as having to be manually upgraded
-        psuffix_regex = "(tar\.gz|tgz|tar\.bz2|zip|xz|tar\.lz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)"
+        psuffix_regex = r"(tar\.gz|tgz|tar\.bz2|zip|xz|tar\.lz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)"
 
         # match name, version and archive type of a package
-        package_regex_comp = re.compile("(?P<name>%s?\.?v?)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s$)"
+        package_regex_comp = re.compile(r"(?P<name>%s?\.?v?)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s$)"
                                                     % (pn_regex, pver_regex, parch_regex, psuffix_regex))
         self.suffix_regex_comp = re.compile(psuffix_regex)
 
@@ -572,7 +554,7 @@ class Wget(FetchMethod):
             version = self._parse_path(package_regex_comp, package)
             if version:
                 package_custom_regex_comp = re.compile(
-                    "(?P<name>%s)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s)" %
+                    r"(?P<name>%s)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s)" %
                     (re.escape(version[0]), pver_regex, parch_regex, psuffix_regex))
             else:
                 package_custom_regex_comp = None
@@ -589,7 +571,7 @@ class Wget(FetchMethod):
         current_version = ['', d.getVar('PV'), '']
 
         """possible to have no version in pkg name, such as spectrum-fw"""
-        if not re.search("\d+", package):
+        if not re.search(r"\d+", package):
             current_version[1] = re.sub('_', '.', current_version[1])
             current_version[1] = re.sub('-', '.', current_version[1])
             return (current_version[1], '')
@@ -607,13 +589,13 @@ class Wget(FetchMethod):
 
             # search for version matches on folders inside the path, like:
             # "5.7" in http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
-            dirver_regex = re.compile("(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
+            dirver_regex = re.compile(r"(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
             m = dirver_regex.search(path)
             if m:
                 pn = d.getVar('PN')
                 dirver = m.group('dirver')
 
-                dirver_pn_regex = re.compile("%s\d?" % (re.escape(pn)))
+                dirver_pn_regex = re.compile(r"%s\d?" % (re.escape(pn)))
                 if not dirver_pn_regex.search(dirver):
                     return (self._check_latest_version_by_dir(dirver,
                         package, package_regex, current_version, ud, d), '')
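
Note on the wget.py hunks above: most of the changes simply convert regular-expression literals to raw strings, so sequences such as \d and \. reach the re module unchanged instead of relying on Python's handling of unknown string escapes (which newer interpreters warn about). A minimal sketch of the equivalence, not part of the commit:

    import re

    # Explicit escaping in a plain literal versus the raw string used above;
    # both compile to the same pattern, the raw string is just less error-prone.
    plain = "(\\d+[.\\-_])+(\\d+)"
    raw   = r"(\d+[\.\-_])+(\d+)"

    assert re.search(plain, "gcc-9.2.0").group(0) == re.search(raw, "gcc-9.2.0").group(0)
    print(re.search(raw, "gcc-9.2.0").group(0))   # -> 9.2.0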

+ 15 - 26
bitbake/lib/bb/main.py

@@ -1,6 +1,3 @@
-#!/usr/bin/env python
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 #
 # Copyright (C) 2003, 2004  Chris Larson
 # Copyright (C) 2003, 2004  Phil Blundell
@@ -9,18 +6,8 @@
 # Copyright (C) 2005        ROAD GmbH
 # Copyright (C) 2006        Richard Purdie
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 import os
 import sys
@@ -267,6 +254,11 @@ class BitBakeConfigParameters(cookerdata.ConfigParameters):
                           help="Do not run any setscene tasks. sstate will be ignored and "
                                "everything needed, built.")
 
+        parser.add_option("", "--skip-setscene", action="store_true",
+                          dest="skipsetscene", default=False,
+                          help="Skip setscene tasks if they would be executed. Tasks previously "
+                               "restored from sstate will be kept, unlike --no-setscene")
+
         parser.add_option("", "--setscene-only", action="store_true",
                           dest="setsceneonly", default=False,
                           help="Only run setscene tasks, don't run any real tasks.")
@@ -448,7 +440,7 @@ def setup_bitbake(configParams, configuration, extrafeatures=None):
                 else:
                     logger.info("Reconnecting to bitbake server...")
                     if not os.path.exists(sockname):
-                        print("Previous bitbake instance shutting down?, waiting to retry...")
+                        logger.info("Previous bitbake instance shutting down?, waiting to retry...")
                         i = 0
                         lock = None
                         # Wait for 5s or until we can get the lock
@@ -460,12 +452,7 @@ def setup_bitbake(configParams, configuration, extrafeatures=None):
                             bb.utils.unlockfile(lock)
                         raise bb.server.process.ProcessTimeout("Bitbake still shutting down as socket exists but no lock?")
                 if not configParams.server_only:
-                    try:
-                        server_connection = bb.server.process.connectProcessServer(sockname, featureset)
-                    except EOFError:
-                        # The server may have been shutting down but not closed the socket yet. If that happened,
-                        # ignore it.
-                        pass
+                    server_connection = bb.server.process.connectProcessServer(sockname, featureset)
 
                 if server_connection or configParams.server_only:
                     break
@@ -475,12 +462,14 @@ def setup_bitbake(configParams, configuration, extrafeatures=None):
                 if not retries:
                     raise
                 retries -= 1
-                if isinstance(e, (bb.server.process.ProcessTimeout, BrokenPipeError)):
-                    logger.info("Retrying server connection...")
+                tryno = 8 - retries
+                if isinstance(e, (bb.server.process.ProcessTimeout, BrokenPipeError, EOFError)):
+                    logger.info("Retrying server connection (#%d)..." % tryno)
                 else:
-                    logger.info("Retrying server connection... (%s)" % traceback.format_exc())
+                    logger.info("Retrying server connection (#%d)... (%s)" % (tryno, traceback.format_exc()))
             if not retries:
-                bb.fatal("Unable to connect to bitbake server, or start one")
+                bb.fatal("Unable to connect to bitbake server, or start one (server startup failures would be in bitbake-cookerdaemon.log).")
+            bb.event.print_ui_queue()
             if retries < 5:
                 time.sleep(5)
 
@@ -502,7 +491,7 @@ def setup_bitbake(configParams, configuration, extrafeatures=None):
 def lockBitbake():
     topdir = bb.cookerdata.findTopdir()
     if not topdir:
-        bb.error("Unable to find conf/bblayers.conf or conf/bitbake.conf. BBAPTH is unset and/or not in a build directory?")
+        bb.error("Unable to find conf/bblayers.conf or conf/bitbake.conf. BBPATH is unset and/or not in a build directory?")
         raise BBMainFatal
     lockfile = topdir + "/bitbake.lock"
     return topdir, bb.utils.lockfile(lockfile, False, False)
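
The main.py changes add a --skip-setscene option (remaining setscene tasks are skipped while work already restored from sstate is kept, unlike --no-setscene) and fold EOFError into the normal connection-retry path with a numbered attempt counter. A generic sketch of that retry shape, with an illustrative function name and defaults that are not bitbake's:

    import time
    import traceback

    def connect_with_retries(connect, max_retries=8, transient=(EOFError, BrokenPipeError)):
        # Transient errors are retried quietly with an attempt number; anything
        # else is also retried, but a traceback is logged first.
        retries = max_retries
        while True:
            try:
                return connect()
            except Exception as e:
                if not retries:
                    raise
                retries -= 1
                tryno = max_retries - retries
                if isinstance(e, transient):
                    print("Retrying server connection (#%d)..." % tryno)
                else:
                    print("Retrying server connection (#%d)... (%s)" % (tryno, traceback.format_exc()))
                if retries < 5:
                    time.sleep(5)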

+ 1 - 14
bitbake/lib/bb/methodpool.py

@@ -1,21 +1,8 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
 #
 # Copyright (C)       2006 Holger Hans Peter Freyther
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 from bb.utils import better_compile, better_exec
 

+ 7 - 20
bitbake/lib/bb/monitordisk.py

@@ -1,21 +1,8 @@
-#!/usr/bin/env python
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 #
 # Copyright (C) 2012 Robert Yang
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 import os, logging, re, sys
 import bb
@@ -28,16 +15,16 @@ def convertGMK(unit):
 
     """ Convert the space unit G, M, K, the unit is case-insensitive """
 
-    unitG = re.match('([1-9][0-9]*)[gG]\s?$', unit)
+    unitG = re.match(r'([1-9][0-9]*)[gG]\s?$', unit)
     if unitG:
         return int(unitG.group(1)) * (1024 ** 3)
-    unitM = re.match('([1-9][0-9]*)[mM]\s?$', unit)
+    unitM = re.match(r'([1-9][0-9]*)[mM]\s?$', unit)
     if unitM:
         return int(unitM.group(1)) * (1024 ** 2)
-    unitK = re.match('([1-9][0-9]*)[kK]\s?$', unit)
+    unitK = re.match(r'([1-9][0-9]*)[kK]\s?$', unit)
     if unitK:
         return int(unitK.group(1)) * 1024
-    unitN = re.match('([1-9][0-9]*)\s?$', unit)
+    unitN = re.match(r'([1-9][0-9]*)\s?$', unit)
     if unitN:
         return int(unitN.group(1))
     else:
@@ -83,7 +70,7 @@ def getDiskData(BBDirs, configuration):
     for pathSpaceInode in BBDirs.split():
         # The input format is: "dir,space,inode", dir is a must, space
         # and inode are optional
-        pathSpaceInodeRe = re.match('([^,]*),([^,]*),([^,]*),?(.*)', pathSpaceInode)
+        pathSpaceInodeRe = re.match(r'([^,]*),([^,]*),([^,]*),?(.*)', pathSpaceInode)
         if not pathSpaceInodeRe:
             printErr("Invalid value in BB_DISKMON_DIRS: %s" % pathSpaceInode)
             return None
@@ -147,7 +134,7 @@ def getInterval(configuration):
     else:
         # The disk space or inode interval is optional, but it should
         # have a correct value once it is specified
-        intervalRe = re.match('([^,]*),?\s*(.*)', interval)
+        intervalRe = re.match(r'([^,]*),?\s*(.*)', interval)
         if intervalRe:
             intervalSpace = intervalRe.group(1)
             if intervalSpace:
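
The monitordisk.py hunks are the same raw-string conversion; convertGMK() itself still accepts an integer with an optional case-insensitive G/M/K suffix. A condensed restatement of that parsing (not the bitbake function itself):

    import re

    def convert_gmk(unit):
        # Accepts e.g. "2G", "512M", "100K" or a bare number of bytes.
        m = re.match(r'([1-9][0-9]*)([gGmMkK]?)\s?$', unit)
        if not m:
            return None
        factor = {'g': 1024 ** 3, 'm': 1024 ** 2, 'k': 1024, '': 1}[m.group(2).lower()]
        return int(m.group(1)) * factor

    assert convert_gmk('2G') == 2 * 1024 ** 3
    assert convert_gmk('512M') == 512 * 1024 ** 2
    assert convert_gmk('1024') == 1024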

+ 1 - 13
bitbake/lib/bb/msg.py

@@ -1,5 +1,3 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 """
 BitBake 'msg' implementation
 
@@ -9,18 +7,8 @@ Message handling infrastructure for bitbake
 
 # Copyright (C) 2006        Richard Purdie
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 import sys
 import copy

+ 3 - 1
bitbake/lib/bb/namedtuple_with_abc.py

@@ -1,6 +1,8 @@
 # http://code.activestate.com/recipes/577629-namedtupleabc-abstract-base-class-mix-in-for-named/
-#!/usr/bin/env python
 # Copyright (c) 2011 Jan Kaliszewski (zuo). Available under the MIT License.
+#
+# SPDX-License-Identifier: MIT
+#
 
 """
 namedtuple_with_abc.py:

+ 2 - 12
bitbake/lib/bb/parse/__init__.py

@@ -9,20 +9,10 @@ File parsers for the BitBake build tools.
 # Copyright (C) 2003, 2004  Chris Larson
 # Copyright (C) 2003, 2004  Phil Blundell
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# SPDX-License-Identifier: GPL-2.0-only
 #
 # Based on functions from the base bb module, Copyright 2003 Holger Schurig
+#
 
 handlers = []
 

+ 1 - 14
bitbake/lib/bb/parse/ast.py

@@ -1,5 +1,3 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 """
  AbstractSyntaxTree classes for the Bitbake language
 """
@@ -8,19 +6,8 @@
 # Copyright (C) 2003, 2004 Phil Blundell
 # Copyright (C) 2009 Holger Hans Peter Freyther
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
 
 import re
 import string

+ 36 - 23
bitbake/lib/bb/parse/parse_py/BBHandler.py

@@ -1,6 +1,3 @@
-#!/usr/bin/env python
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 """
    class for handling .bb files
 
@@ -12,19 +9,8 @@
 #  Copyright (C) 2003, 2004  Chris Larson
 #  Copyright (C) 2003, 2004  Phil Blundell
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
 
 import re, bb, os
 import logging
@@ -38,14 +24,15 @@ from .ConfHandler import include, init
 # For compatibility
 bb.deprecate_import(__name__, "bb.parse", ["vars_from_file"])
 
-__func_start_regexp__    = re.compile( r"(((?P<py>python)|(?P<fr>fakeroot))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$" )
-__inherit_regexp__       = re.compile( r"inherit\s+(.+)" )
-__export_func_regexp__   = re.compile( r"EXPORT_FUNCTIONS\s+(.+)" )
-__addtask_regexp__       = re.compile("addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*")
-__deltask_regexp__       = re.compile("deltask\s+(?P<func>\w+)")
-__addhandler_regexp__    = re.compile( r"addhandler\s+(.+)" )
-__def_regexp__           = re.compile( r"def\s+(\w+).*:" )
-__python_func_regexp__   = re.compile( r"(\s+.*)|(^$)|(^#)" )
+__func_start_regexp__    = re.compile(r"(((?P<py>python)|(?P<fr>fakeroot))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$" )
+__inherit_regexp__       = re.compile(r"inherit\s+(.+)" )
+__export_func_regexp__   = re.compile(r"EXPORT_FUNCTIONS\s+(.+)" )
+__addtask_regexp__       = re.compile(r"addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*")
+__deltask_regexp__       = re.compile(r"deltask\s+(?P<func>\w+)(?P<ignores>.*)")
+__addhandler_regexp__    = re.compile(r"addhandler\s+(.+)" )
+__def_regexp__           = re.compile(r"def\s+(\w+).*:" )
+__python_func_regexp__   = re.compile(r"(\s+.*)|(^$)|(^#)" )
+__python_tab_regexp__    = re.compile(r" *\t")
 
 __infunc__ = []
 __inpython__ = False
@@ -160,6 +147,16 @@ def handle(fn, d, include):
 
 def feeder(lineno, s, fn, root, statements, eof=False):
     global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __def_regexp__, __python_func_regexp__, __inpython__, __infunc__, __body__, bb, __residue__, __classname__
+
+    # Check tabs in python functions:
+    # - def py_funcname(): covered by __inpython__
+    # - python(): covered by '__anonymous' == __infunc__[0]
+    # - python funcname(): covered by __infunc__[3]
+    if __inpython__ or (__infunc__ and ('__anonymous' == __infunc__[0] or __infunc__[3])):
+        tab = __python_tab_regexp__.match(s)
+        if tab:
+            bb.warn('python should use 4 spaces indentation, but found tabs in %s, line %s' % (root, lineno))
+
     if __infunc__:
         if s == '}':
             __body__.append('')
@@ -225,11 +222,27 @@ def feeder(lineno, s, fn, root, statements, eof=False):
 
     m = __addtask_regexp__.match(s)
     if m:
+        if len(m.group().split()) == 2:
+            # Check and warn for "addtask task1 task2"
+            m2 = re.match(r"addtask\s+(?P<func>\w+)(?P<ignores>.*)", s)
+            if m2 and m2.group('ignores'):
+                logger.warning('addtask ignored: "%s"' % m2.group('ignores'))
+
+        # Check and warn for "addtask task1 before task2 before task3", the
+        # similar to "after"
+        taskexpression = s.split()
+        for word in ('before', 'after'):
+            if taskexpression.count(word) > 1:
+                logger.warning("addtask contained multiple '%s' keywords, only one is supported" % word)
+
         ast.handleAddTask(statements, fn, lineno, m)
         return
 
     m = __deltask_regexp__.match(s)
     if m:
+        # Check and warn for "deltask task1 task2"
+        if m.group('ignores'):
+            logger.warning('deltask ignored: "%s"' % m.group('ignores'))
         ast.handleDelTask(statements, fn, lineno, m)
         return
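
The new BBHandler.py checks are warnings only, not syntax changes: extra words after the task name in addtask or deltask are reported as ignored, repeated before/after keywords are flagged, and tab-indented python function bodies are called out. A small standalone demonstration of the addtask case (the regexes are copied from the hunk above, the input line is hypothetical):

    import re

    addtask_re = re.compile(r"addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*")

    line = "addtask task1 task2"
    m = addtask_re.match(line)
    if m and len(m.group().split()) == 2:
        m2 = re.match(r"addtask\s+(?P<func>\w+)(?P<ignores>.*)", line)
        if m2 and m2.group('ignores'):
            print('addtask ignored: "%s"' % m2.group('ignores'))   # -> " task2"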
 

+ 2 - 15
bitbake/lib/bb/parse/parse_py/ConfHandler.py

@@ -1,6 +1,3 @@
-#!/usr/bin/env python
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 """
    class for handling configuration data files
 
@@ -11,18 +8,8 @@
 # Copyright (C) 2003, 2004  Chris Larson
 # Copyright (C) 2003, 2004  Phil Blundell
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 import errno
 import re
@@ -147,7 +134,7 @@ def handle(fn, data, include):
             continue
         s = s.rstrip()
         while s[-1] == '\\':
-            s2 = f.readline().strip()
+            s2 = f.readline().rstrip()
             lineno = lineno + 1
             if (not s2 or s2 and s2[0] != "#") and s[0] == "#" :
                 bb.fatal("There is a confusing multiline, partially commented expression on line %s of file %s (%s).\nPlease clarify whether this is all a comment or should be parsed." % (lineno, fn, s))

+ 2 - 15
bitbake/lib/bb/parse/parse_py/__init__.py

@@ -1,6 +1,3 @@
-#!/usr/bin/env python
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 """
 BitBake Parsers
 
@@ -11,20 +8,10 @@ File parsers for the BitBake build tools.
 # Copyright (C) 2003, 2004  Chris Larson
 # Copyright (C) 2003, 2004  Phil Blundell
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# SPDX-License-Identifier: GPL-2.0-only
 #
 # Based on functions from the base bb module, Copyright 2003 Holger Schurig
+#
 
 from __future__ import absolute_import
 from . import ConfHandler

+ 159 - 75
bitbake/lib/bb/persist_data.py

@@ -8,18 +8,8 @@ currently, providing a key/value store accessed by 'domain'.
 # Copyright (C) 2007        Richard Purdie
 # Copyright (C) 2010        Chris Larson <chris_larson@mentor.com>
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 import collections
 import logging
@@ -29,6 +19,7 @@ import warnings
 from bb.compat import total_ordering
 from collections import Mapping
 import sqlite3
+import contextlib
 
 sqlversion = sqlite3.sqlite_version_info
 if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
@@ -36,84 +27,181 @@ if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
 
 
 logger = logging.getLogger("BitBake.PersistData")
-if hasattr(sqlite3, 'enable_shared_cache'):
-    try:
-        sqlite3.enable_shared_cache(True)
-    except sqlite3.OperationalError:
-        pass
-
 
 @total_ordering
 class SQLTable(collections.MutableMapping):
+    class _Decorators(object):
+        @staticmethod
+        def retry(*, reconnect=True):
+            """
+            Decorator that restarts a function if a database locked sqlite
+            exception occurs. If reconnect is True, the database connection
+            will be closed and reopened each time a failure occurs
+            """
+            def retry_wrapper(f):
+                def wrap_func(self, *args, **kwargs):
+                    # Reconnect if necessary
+                    if self.connection is None and reconnect:
+                        self.reconnect()
+
+                    count = 0
+                    while True:
+                        try:
+                            return f(self, *args, **kwargs)
+                        except sqlite3.OperationalError as exc:
+                            if count < 500 and ('is locked' in str(exc) or 'locking protocol' in str(exc)):
+                                count = count + 1
+                                if reconnect:
+                                    self.reconnect()
+                                continue
+                            raise
+                return wrap_func
+            return retry_wrapper
+
+        @staticmethod
+        def transaction(f):
+            """
+            Decorator that starts a database transaction and creates a database
+            cursor for performing queries. If no exception is thrown, the
+            database results are committed. If an exception occurs, the database
+            is rolled back. In all cases, the cursor is closed after the
+            function ends.
+
+            Note that the cursor is passed as an extra argument to the function
+            after `self` and before any of the normal arguments
+            """
+            def wrap_func(self, *args, **kwargs):
+                # Context manager will COMMIT the database on success,
+                # or ROLLBACK on an exception
+                with self.connection:
+                    # Automatically close the cursor when done
+                    with contextlib.closing(self.connection.cursor()) as cursor:
+                        return f(self, cursor, *args, **kwargs)
+            return wrap_func
+
     """Object representing a table/domain in the database"""
     def __init__(self, cachefile, table):
         self.cachefile = cachefile
         self.table = table
-        self.cursor = connect(self.cachefile)
-
-        self._execute("CREATE TABLE IF NOT EXISTS %s(key TEXT, value TEXT);"
-                      % table)
-
-    def _execute(self, *query):
-        """Execute a query, waiting to acquire a lock if necessary"""
-        count = 0
-        while True:
-            try:
-                return self.cursor.execute(*query)
-            except sqlite3.OperationalError as exc:
-                if 'database is locked' in str(exc) and count < 500:
-                    count = count + 1
+
+        self.connection = None
+        self._execute_single("CREATE TABLE IF NOT EXISTS %s(key TEXT PRIMARY KEY NOT NULL, value TEXT);" % table)
+
+    @_Decorators.retry(reconnect=False)
+    @_Decorators.transaction
+    def _setup_database(self, cursor):
+        cursor.execute("pragma synchronous = off;")
+        # Enable WAL and keep the autocheckpoint length small (the default is
+        # usually 1000). Persistent caches are usually read-mostly, so keeping
+        # this short will keep readers running quickly
+        cursor.execute("pragma journal_mode = WAL;")
+        cursor.execute("pragma wal_autocheckpoint = 100;")
+
+    def reconnect(self):
+        if self.connection is not None:
+            self.connection.close()
+        self.connection = sqlite3.connect(self.cachefile, timeout=5)
+        self.connection.text_factory = str
+        self._setup_database()
+
+    @_Decorators.retry()
+    @_Decorators.transaction
+    def _execute_single(self, cursor, *query):
+        """
+        Executes a single query and discards the results. This correctly closes
+        the database cursor when finished
+        """
+        cursor.execute(*query)
+
+    @_Decorators.retry()
+    def _row_iter(self, f, *query):
+        """
+        Helper function that returns a row iterator. Each time __next__ is
+        called on the iterator, the provided function is evaluated to determine
+        the return value
+        """
+        class CursorIter(object):
+            def __init__(self, cursor):
+                self.cursor = cursor
+
+            def __iter__(self):
+                return self
+
+            def __next__(self):
+                row = self.cursor.fetchone()
+                if row is None:
                     self.cursor.close()
-                    self.cursor = connect(self.cachefile)
-                    continue
-                raise
+                    raise StopIteration
+                return f(row)
+
+            def __enter__(self):
+                return self
+
+            def __exit__(self, typ, value, traceback):
+                self.cursor.close()
+                return False
+
+        cursor = self.connection.cursor()
+        try:
+            cursor.execute(*query)
+            return CursorIter(cursor)
+        except:
+            cursor.close()
 
     def __enter__(self):
-        self.cursor.__enter__()
+        self.connection.__enter__()
         return self
 
     def __exit__(self, *excinfo):
-        self.cursor.__exit__(*excinfo)
-
-    def __getitem__(self, key):
-        data = self._execute("SELECT * from %s where key=?;" %
-                             self.table, [key])
-        for row in data:
+        self.connection.__exit__(*excinfo)
+
+    @_Decorators.retry()
+    @_Decorators.transaction
+    def __getitem__(self, cursor, key):
+        cursor.execute("SELECT * from %s where key=?;" % self.table, [key])
+        row = cursor.fetchone()
+        if row is not None:
             return row[1]
         raise KeyError(key)
 
-    def __delitem__(self, key):
+    @_Decorators.retry()
+    @_Decorators.transaction
+    def __delitem__(self, cursor, key):
         if key not in self:
             raise KeyError(key)
-        self._execute("DELETE from %s where key=?;" % self.table, [key])
+        cursor.execute("DELETE from %s where key=?;" % self.table, [key])
 
-    def __setitem__(self, key, value):
+    @_Decorators.retry()
+    @_Decorators.transaction
+    def __setitem__(self, cursor, key, value):
         if not isinstance(key, str):
             raise TypeError('Only string keys are supported')
         elif not isinstance(value, str):
             raise TypeError('Only string values are supported')
 
-        data = self._execute("SELECT * from %s where key=?;" %
-                                   self.table, [key])
-        exists = len(list(data))
-        if exists:
-            self._execute("UPDATE %s SET value=? WHERE key=?;" % self.table,
-                          [value, key])
+        cursor.execute("SELECT * from %s where key=?;" % self.table, [key])
+        row = cursor.fetchone()
+        if row is not None:
+            cursor.execute("UPDATE %s SET value=? WHERE key=?;" % self.table, [value, key])
         else:
-            self._execute("INSERT into %s(key, value) values (?, ?);" %
-                          self.table, [key, value])
-
-    def __contains__(self, key):
-        return key in set(self)
-
-    def __len__(self):
-        data = self._execute("SELECT COUNT(key) FROM %s;" % self.table)
-        for row in data:
+            cursor.execute("INSERT into %s(key, value) values (?, ?);" % self.table, [key, value])
+
+    @_Decorators.retry()
+    @_Decorators.transaction
+    def __contains__(self, cursor, key):
+        cursor.execute('SELECT * from %s where key=?;' % self.table, [key])
+        return cursor.fetchone() is not None
+
+    @_Decorators.retry()
+    @_Decorators.transaction
+    def __len__(self, cursor):
+        cursor.execute("SELECT COUNT(key) FROM %s;" % self.table)
+        row = cursor.fetchone()
+        if row is not None:
             return row[0]
 
     def __iter__(self):
-        data = self._execute("SELECT key FROM %s;" % self.table)
-        return (row[0] for row in data)
+        return self._row_iter(lambda row: row[0], "SELECT key from %s;" % self.table)
 
     def __lt__(self, other):
         if not isinstance(other, Mapping):
@@ -122,25 +210,27 @@ class SQLTable(collections.MutableMapping):
         return len(self) < len(other)
 
     def get_by_pattern(self, pattern):
-        data = self._execute("SELECT * FROM %s WHERE key LIKE ?;" %
-                             self.table, [pattern])
-        return [row[1] for row in data]
+        return self._row_iter(lambda row: row[1], "SELECT * FROM %s WHERE key LIKE ?;" %
+                              self.table, [pattern])
 
     def values(self):
         return list(self.itervalues())
 
     def itervalues(self):
-        data = self._execute("SELECT value FROM %s;" % self.table)
-        return (row[0] for row in data)
+        return self._row_iter(lambda row: row[0], "SELECT value FROM %s;" %
+                              self.table)
 
     def items(self):
         return list(self.iteritems())
 
     def iteritems(self):
-        return self._execute("SELECT * FROM %s;" % self.table)
+        return self._row_iter(lambda row: (row[0], row[1]), "SELECT * FROM %s;" %
+                              self.table)
 
-    def clear(self):
-        self._execute("DELETE FROM %s;" % self.table)
+    @_Decorators.retry()
+    @_Decorators.transaction
+    def clear(self, cursor):
+        cursor.execute("DELETE FROM %s;" % self.table)
 
     def has_key(self, key):
         return key in self
@@ -194,12 +284,6 @@ class PersistData(object):
         """
         del self.data[domain][key]
 
-def connect(database):
-    connection = sqlite3.connect(database, timeout=5, isolation_level=None)
-    connection.execute("pragma synchronous = off;")
-    connection.text_factory = str
-    return connection
-
 def persist(domain, d):
     """Convenience factory for SQLTable objects based upon metadata"""
     import bb.utils
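
The persist_data.py rewrite replaces the shared long-lived cursor with per-operation cursors: a transaction decorator opens a cursor, commits on success and rolls back on error, while a retry decorator reconnects and re-runs the operation when sqlite reports the database as locked; the cache also switches to WAL journaling with a short autocheckpoint. A self-contained sketch of the decorator-injected-cursor pattern (class and names are illustrative, not bitbake's SQLTable):

    import contextlib
    import sqlite3

    def transaction(f):
        # Run the wrapped method inside "with self.connection" so it commits on
        # success and rolls back on an exception; the cursor is always closed.
        def wrap_func(self, *args, **kwargs):
            with self.connection:
                with contextlib.closing(self.connection.cursor()) as cursor:
                    return f(self, cursor, *args, **kwargs)
        return wrap_func

    class KVStore:
        def __init__(self, path):
            self.connection = sqlite3.connect(path, timeout=5)
            self._create()

        @transaction
        def _create(self, cursor):
            cursor.execute("CREATE TABLE IF NOT EXISTS kv(key TEXT PRIMARY KEY, value TEXT)")

        @transaction
        def set(self, cursor, key, value):
            cursor.execute("INSERT OR REPLACE INTO kv(key, value) VALUES (?, ?)", (key, value))

        @transaction
        def get(self, cursor, key):
            cursor.execute("SELECT value FROM kv WHERE key=?", (key,))
            row = cursor.fetchone()
            return row[0] if row is not None else None

    store = KVStore(":memory:")
    store.set("PN", "busybox")
    print(store.get("PN"))   # -> busybox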

+ 4 - 0
bitbake/lib/bb/process.py

@@ -1,3 +1,7 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
 import logging
 import signal
 import subprocess

+ 16 - 12
bitbake/lib/bb/progress.py

@@ -4,18 +4,8 @@ BitBake progress handling code
 
 # Copyright (C) 2016 Intel Corporation
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 import sys
 import re
@@ -23,6 +13,7 @@ import time
 import inspect
 import bb.event
 import bb.build
+from bb.build import StdoutNoopContextManager
 
 class ProgressHandler(object):
     """
@@ -37,7 +28,14 @@ class ProgressHandler(object):
         if outfile:
             self._outfile = outfile
         else:
-            self._outfile = sys.stdout
+            self._outfile = StdoutNoopContextManager()
+
+    def __enter__(self):
+        self._outfile.__enter__()
+        return self
+
+    def __exit__(self, *excinfo):
+        self._outfile.__exit__(*excinfo)
 
     def _fire_progress(self, taskprogress, rate=None):
         """Internal function to fire the progress event"""
@@ -157,6 +155,12 @@ class MultiStageProgressReporter(object):
             self._stage_total = None
             self._callers = []
 
+    def __enter__(self):
+        return self
+
+    def __exit__(self, *excinfo):
+        pass
+
     def _fire_progress(self, taskprogress):
         bb.event.fire(bb.build.TaskProgress(taskprogress), self._data)
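
The progress.py handlers become context managers that simply delegate __enter__/__exit__ to the stream they write to (StdoutNoopContextManager when no outfile is given), so callers can use "with" whether the output is a real pipe or plain stdout. A reduced sketch of that delegation with illustrative names:

    import sys

    class StdoutSink:
        # Stand-in for the no-op stdout wrapper: writable and context-managed.
        def write(self, data):
            sys.stdout.write(data)
        def __enter__(self):
            return self
        def __exit__(self, *excinfo):
            pass

    class SimpleProgress:
        def __init__(self, outfile=None):
            self._outfile = outfile if outfile else StdoutSink()
        def __enter__(self):
            self._outfile.__enter__()
            return self
        def __exit__(self, *excinfo):
            self._outfile.__exit__(*excinfo)
        def update(self, pct):
            self._outfile.write("progress: %d%%\n" % pct)

    with SimpleProgress() as progress:
        for pct in (25, 50, 100):
            progress.update(pct)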
 

+ 3 - 15
bitbake/lib/bb/providers.py

@@ -1,5 +1,3 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 #
 # Copyright (C) 2003, 2004  Chris Larson
 # Copyright (C) 2003, 2004  Phil Blundell
@@ -8,18 +6,8 @@
 # Copyright (C) 2005        ROAD GmbH
 # Copyright (C) 2006        Richard Purdie
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 import re
 import logging
@@ -129,7 +117,7 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
         preferred_v = cfgData.getVar("PREFERRED_VERSION")
 
     if preferred_v:
-        m = re.match('(\d+:)*(.*)(_.*)*', preferred_v)
+        m = re.match(r'(\d+:)*(.*)(_.*)*', preferred_v)
         if m:
             if m.group(1):
                 preferred_e = m.group(1)[:-1]
@@ -384,7 +372,7 @@ def getRuntimeProviders(dataCache, rdepend):
 
     # Only search dynamic packages if we can't find anything in other variables
     for pattern in dataCache.packages_dynamic:
-        pattern = pattern.replace('+', "\+")
+        pattern = pattern.replace(r'+', r"\+")
         if pattern in regexp_cache:
             regexp = regexp_cache[pattern]
         else:
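
In providers.py the raw-string change is cosmetic for '+' itself (r'+' and '+' are the same string); the useful point is that patterns from packages_dynamic are compiled as regular expressions, so a literal plus sign has to be escaped before compiling. A one-off illustration with a hypothetical package name:

    import re

    pattern = "libstdc++-dev"                          # hypothetical dynamic package name
    regexp = re.compile(pattern.replace('+', r'\+'))   # escape '+' before compiling
    print(bool(regexp.match("libstdc++-dev")))         # -> True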

+ 0 - 710
bitbake/lib/bb/pysh/builtin.py

@@ -1,710 +0,0 @@
-# builtin.py - builtins and utilities definitions for pysh.
-#
-# Copyright 2007 Patrick Mezard
-#
-# This software may be used and distributed according to the terms
-# of the GNU General Public License, incorporated herein by reference.
-
-"""Builtin and internal utilities implementations.
-
-- Beware not to use python interpreter environment as if it were the shell
-environment. For instance, commands working directory must be explicitely handled
-through env['PWD'] instead of relying on python working directory.
-"""
-import errno
-import optparse
-import os
-import re
-import subprocess
-import sys
-import time
-
-def has_subprocess_bug():
-    return getattr(subprocess, 'list2cmdline') and \
-       (    subprocess.list2cmdline(['']) == '' or \
-            subprocess.list2cmdline(['foo|bar']) == 'foo|bar')
-            
-# Detect python bug 1634343: "subprocess swallows empty arguments under win32"
-# <http://sourceforge.net/tracker/index.php?func=detail&aid=1634343&group_id=5470&atid=105470>
-# Also detect: "[ 1710802 ] subprocess must escape redirection characters under win32"
-# <http://sourceforge.net/tracker/index.php?func=detail&aid=1710802&group_id=5470&atid=105470>
-if has_subprocess_bug():
-    import subprocess_fix
-    subprocess.list2cmdline = subprocess_fix.list2cmdline
-
-from sherrors import *
-
-class NonExitingParser(optparse.OptionParser):
-    """OptionParser default behaviour upon error is to print the error message and
-    exit. Raise a utility error instead.
-    """
-    def error(self, msg):
-        raise UtilityError(msg)
-
-#-------------------------------------------------------------------------------  
-# set special builtin
-#-------------------------------------------------------------------------------  
-OPT_SET = NonExitingParser(usage="set - set or unset options and positional parameters")
-OPT_SET.add_option( '-f', action='store_true', dest='has_f', default=False,
-    help='The shell shall disable pathname expansion.')
-OPT_SET.add_option('-e', action='store_true', dest='has_e', default=False,
-    help="""When this option is on, if a simple command fails for any of the \
-    reasons listed in Consequences of Shell Errors or returns an exit status \
-    value >0, and is not part of the compound list following a while, until, \
-    or if keyword, and is not a part of an AND or OR list, and is not a \
-    pipeline preceded by the ! reserved word, then the shell shall immediately \
-    exit.""")
-OPT_SET.add_option('-x', action='store_true', dest='has_x', default=False,
-    help="""The shell shall write to standard error a trace for each command \
-    after it expands the command and before it executes it. It is unspecified \
-    whether the command that turns tracing off is traced.""")
-
-def builtin_set(name, args, interp, env, stdin, stdout, stderr, debugflags):
-    if 'debug-utility' in debugflags:
-        print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
-    option, args = OPT_SET.parse_args(args)
-    env = interp.get_env()
-    
-    if option.has_f:
-        env.set_opt('-f')
-    if option.has_e:
-        env.set_opt('-e')
-    if option.has_x:
-        env.set_opt('-x')
-    return 0
-    
-#-------------------------------------------------------------------------------  
-# shift special builtin
-#-------------------------------------------------------------------------------  
-def builtin_shift(name, args, interp, env, stdin, stdout, stderr, debugflags):
-    if 'debug-utility' in debugflags:
-        print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-        
-    params = interp.get_env().get_positional_args()
-    if args:
-        try:
-            n = int(args[0])
-            if n > len(params):
-                raise ValueError()
-        except ValueError:
-            return 1
-    else:
-        n = 1
-        
-    params[:n] = []
-    interp.get_env().set_positional_args(params)
-    return 0
-    
-#-------------------------------------------------------------------------------  
-# export special builtin
-#-------------------------------------------------------------------------------  
-OPT_EXPORT = NonExitingParser(usage="set - set or unset options and positional parameters")
-OPT_EXPORT.add_option('-p', action='store_true', dest='has_p', default=False)
-
-def builtin_export(name, args, interp, env, stdin, stdout, stderr, debugflags):
-    if 'debug-utility' in debugflags:
-        print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-        
-    option, args = OPT_EXPORT.parse_args(args)
-    if option.has_p:
-        raise NotImplementedError()
-    
-    for arg in args:
-        try:
-            name, value = arg.split('=', 1)
-        except ValueError:
-            name, value = arg, None
-        env = interp.get_env().export(name, value)
-    
-    return 0
-    
-#-------------------------------------------------------------------------------  
-# return special builtin
-#-------------------------------------------------------------------------------  
-def builtin_return(name, args, interp, env, stdin, stdout, stderr, debugflags):
-    if 'debug-utility' in debugflags:
-        print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-    res = 0
-    if args:
-        try:
-            res = int(args[0])
-        except ValueError:
-            res = 0
-        if not 0<=res<=255:
-            res = 0
-            
-    # BUG: should be last executed command exit code        
-    raise ReturnSignal(res)
-
-#-------------------------------------------------------------------------------  
-# trap special builtin
-#-------------------------------------------------------------------------------  
-def builtin_trap(name, args, interp, env, stdin, stdout, stderr, debugflags):
-    if 'debug-utility' in debugflags:
-        print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-    if len(args) < 2:
-        stderr.write('trap: usage: trap [[arg] signal_spec ...]\n')
-        return 2
-
-    action = args[0]
-    for sig in args[1:]:
-        try:
-            env.traps[sig] = action
-        except Exception as e:
-            stderr.write('trap: %s\n' % str(e))
-    return 0
-
-#-------------------------------------------------------------------------------  
-# unset special builtin
-#-------------------------------------------------------------------------------
-OPT_UNSET = NonExitingParser("unset - unset values and attributes of variables and functions")
-OPT_UNSET.add_option( '-f', action='store_true', dest='has_f', default=False)
-OPT_UNSET.add_option( '-v', action='store_true', dest='has_v', default=False)
-
-def builtin_unset(name, args, interp, env, stdin, stdout, stderr, debugflags):
-    if 'debug-utility' in debugflags:
-        print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-        
-    option, args = OPT_UNSET.parse_args(args)
-    
-    status = 0
-    env = interp.get_env()
-    for arg in args:    
-        try:
-            if option.has_f:
-                env.remove_function(arg)
-            else:
-                del env[arg]
-        except KeyError:
-            pass
-        except VarAssignmentError:
-            status = 1
-            
-    return status
-
-#-------------------------------------------------------------------------------  
-# wait special builtin
-#-------------------------------------------------------------------------------  
-def builtin_wait(name, args, interp, env, stdin, stdout, stderr, debugflags):
-    if 'debug-utility' in debugflags:
-        print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
-    return interp.wait([int(arg) for arg in args])
-
-#-------------------------------------------------------------------------------  
-# cat utility
-#-------------------------------------------------------------------------------
-def utility_cat(name, args, interp, env, stdin, stdout, stderr, debugflags):
-    if 'debug-utility' in debugflags:
-        print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
-    if not args:
-        args = ['-']
-
-    status = 0
-    for arg in args:
-        if arg == '-':
-            data = stdin.read()
-        else:
-            path = os.path.join(env['PWD'], arg)
-            try:
-                f = file(path, 'rb')
-                try:
-                    data = f.read()
-                finally:
-                    f.close()
-            except IOError as e:
-                if e.errno != errno.ENOENT:
-                    raise
-                status = 1
-                continue
-        stdout.write(data)
-        stdout.flush()
-    return status
-    
-#-------------------------------------------------------------------------------  
-# cd utility
-#-------------------------------------------------------------------------------  
-OPT_CD = NonExitingParser("cd - change the working directory")
-
-def utility_cd(name, args, interp, env, stdin, stdout, stderr, debugflags):
-    if 'debug-utility' in debugflags:
-        print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
-    option, args = OPT_CD.parse_args(args)
-    env = interp.get_env()
-    
-    directory = None
-    printdir = False
-    if not args:
-        home = env.get('HOME')
-        if home:
-            # Unspecified, do nothing
-            return 0
-        else:
-            directory = home
-    elif len(args)==1:
-        directory = args[0]
-        if directory=='-':
-            if 'OLDPWD' not in env:
-                raise UtilityError("OLDPWD not set")
-            printdir = True
-            directory = env['OLDPWD']
-    else:
-        raise UtilityError("too many arguments")
-            
-    curpath = None
-    # Absolute directories will be handled correctly by the os.path.join call.
-    if not directory.startswith('.') and not directory.startswith('..'):
-        cdpaths = env.get('CDPATH', '.').split(';')
-        for cdpath in cdpaths:
-            p = os.path.join(cdpath, directory)
-            if os.path.isdir(p):
-                curpath = p
-                break
-    
-    if curpath is None:
-        curpath = directory
-    curpath = os.path.join(env['PWD'], directory)
-
-    env['OLDPWD'] = env['PWD']
-    env['PWD'] = curpath
-    if printdir:
-        stdout.write('%s\n' % curpath)
-    return 0
-
-#-------------------------------------------------------------------------------  
-# colon utility
-#-------------------------------------------------------------------------------  
-def utility_colon(name, args, interp, env, stdin, stdout, stderr, debugflags):
-    if 'debug-utility' in debugflags:
-        print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-    return 0
-    
-#-------------------------------------------------------------------------------  
-# echo utility
-#-------------------------------------------------------------------------------  
-def utility_echo(name, args, interp, env, stdin, stdout, stderr, debugflags):
-    if 'debug-utility' in debugflags:
-        print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-        
-    # Echo only takes arguments, no options. Use printf if you need fancy stuff.
-    output = ' '.join(args) + '\n'
-    stdout.write(output)
-    stdout.flush()
-    return 0
-    
-#-------------------------------------------------------------------------------  
-# egrep utility
-#-------------------------------------------------------------------------------
-# egrep is usually a shell script.
-# Unfortunately, pysh does not support shell scripts *with arguments* right now,
-# so the redirection is implemented here, assuming grep is available.
-def utility_egrep(name, args, interp, env, stdin, stdout, stderr, debugflags):
-    if 'debug-utility' in debugflags:
-        print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-        
-    return run_command('grep', ['-E'] + args, interp, env, stdin, stdout, 
-        stderr, debugflags)
-    
-#-------------------------------------------------------------------------------  
-# env utility
-#-------------------------------------------------------------------------------  
-def utility_env(name, args, interp, env, stdin, stdout, stderr, debugflags):
-    if 'debug-utility' in debugflags:
-        print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-    
-    if args and args[0]=='-i':
-        raise NotImplementedError('env: -i option is not implemented')
-    
-    i = 0
-    for arg in args:
-        if '=' not in arg:
-            break
-        # Update the current environment
-        name, value = arg.split('=', 1)
-        env[name] = value
-        i += 1
-        
-    if args[i:]:
-        # Find then execute the specified interpreter
-        utility = env.find_in_path(args[i])
-        if not utility:
-            return 127
-        args[i:i+1] = utility
-        name = args[i]
-        args = args[i+1:]
-        try:
-            return run_command(name, args, interp, env, stdin, stdout, stderr, 
-                debugflags)
-        except UtilityError:
-            stderr.write('env: failed to execute %s' % ' '.join([name]+args))
-            return 126            
-    else:
-        for pair in env.get_variables().iteritems():
-            stdout.write('%s=%s\n' % pair)
-    return 0
-    
-#-------------------------------------------------------------------------------  
-# exit utility
-#-------------------------------------------------------------------------------
-def utility_exit(name, args, interp, env, stdin, stdout, stderr, debugflags):
-    if 'debug-utility' in debugflags:
-        print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-        
-    res = None
-    if args:
-        try:
-            res = int(args[0])
-        except ValueError:
-            res = None
-        if not 0<=res<=255:
-            res = None
-            
-    if res is None:
-        # BUG: should be last executed command exit code
-        res = 0
-        
-    raise ExitSignal(res)
-
-#-------------------------------------------------------------------------------  
-# fgrep utility
-#-------------------------------------------------------------------------------
-# see egrep
-def utility_fgrep(name, args, interp, env, stdin, stdout, stderr, debugflags):
-    if 'debug-utility' in debugflags:
-        print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-        
-    return run_command('grep', ['-F'] + args, interp, env, stdin, stdout, 
-        stderr, debugflags)
-
-#-------------------------------------------------------------------------------  
-# gunzip utility
-#-------------------------------------------------------------------------------
-# see egrep
-def utility_gunzip(name, args, interp, env, stdin, stdout, stderr, debugflags):
-    if 'debug-utility' in debugflags:
-        print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-        
-    return run_command('gzip', ['-d'] + args, interp, env, stdin, stdout, 
-        stderr, debugflags)
-    
-#-------------------------------------------------------------------------------  
-# kill utility
-#-------------------------------------------------------------------------------
-def utility_kill(name, args, interp, env, stdin, stdout, stderr, debugflags):
-    if 'debug-utility' in debugflags:
-        print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-        
-    for arg in args:
-        pid = int(arg)
-        status = subprocess.call(['pskill', '/T', str(pid)],
-                                shell=True,
-                                stdout=subprocess.PIPE,
-                                stderr=subprocess.PIPE)
-        # pskill is asynchronous, hence the stupid polling loop
-        while 1:
-            p = subprocess.Popen(['pslist', str(pid)],
-                                shell=True,
-                                stdout=subprocess.PIPE,
-                                stderr=subprocess.STDOUT)
-            output = p.communicate()[0]
-            if ('process %d was not' % pid) in output:
-                break
-            time.sleep(1)
-    return status
-    
-#-------------------------------------------------------------------------------  
-# mkdir utility
-#-------------------------------------------------------------------------------
-OPT_MKDIR = NonExitingParser("mkdir - make directories.")
-OPT_MKDIR.add_option('-p', action='store_true', dest='has_p', default=False)
-
-def utility_mkdir(name, args, interp, env, stdin, stdout, stderr, debugflags):
-    if 'debug-utility' in debugflags:
-        print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-        
-    # TODO: implement umask
-    # TODO: implement proper utility error report
-    option, args = OPT_MKDIR.parse_args(args)
-    for arg in args:
-        path = os.path.join(env['PWD'], arg)
-        if option.has_p:
-            try:
-                os.makedirs(path)
-            except IOError as e:
-                if e.errno != errno.EEXIST:
-                    raise
-        else:               
-            os.mkdir(path)
-    return 0
-
-#-------------------------------------------------------------------------------  
-# netstat utility
-#-------------------------------------------------------------------------------
-def utility_netstat(name, args, interp, env, stdin, stdout, stderr, debugflags):
-    # Do you really expect me to implement netstat ?
-    # This empty form is enough for Mercurial tests since it's
-    # supposed to generate nothing upon success. Faking this test
-    # is not a big deal either.
-    if 'debug-utility' in debugflags:
-        print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-    return 0
-    
-#-------------------------------------------------------------------------------  
-# pwd utility
-#-------------------------------------------------------------------------------  
-OPT_PWD = NonExitingParser("pwd - return working directory name")
-OPT_PWD.add_option('-L', action='store_true', dest='has_L', default=True,
-    help="""If the PWD environment variable contains an absolute pathname of \
-    the current directory that does not contain the filenames dot or dot-dot, \
-    pwd shall write this pathname to standard output. Otherwise, the -L option \
-    shall behave as the -P option.""")
-OPT_PWD.add_option('-P', action='store_true', dest='has_L', default=False,
-    help="""The absolute pathname written shall not contain filenames that, in \
-    the context of the pathname, refer to files of type symbolic link.""")
-
-def utility_pwd(name, args, interp, env, stdin, stdout, stderr, debugflags):
-    if 'debug-utility' in debugflags:
-        print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
-    option, args = OPT_PWD.parse_args(args)        
-    stdout.write('%s\n' % env['PWD'])
-    return 0
-    
-#-------------------------------------------------------------------------------  
-# printf utility
-#-------------------------------------------------------------------------------
-RE_UNESCAPE = re.compile(r'(\\x[a-zA-Z0-9]{2}|\\[0-7]{1,3}|\\.)')
-
-def utility_printf(name, args, interp, env, stdin, stdout, stderr, debugflags):
-    if 'debug-utility' in debugflags:
-        print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-        
-    def replace(m):
-        assert m.group()
-        g = m.group()[1:]
-        if g.startswith('x'):
-            return chr(int(g[1:], 16))
-        if len(g) <= 3 and len([c for c in g if c in '01234567']) == len(g):
-            # Yay, an octal number
-            return chr(int(g, 8))
-        return {
-            'a': '\a',
-            'b': '\b',
-            'f': '\f',
-            'n': '\n',
-            'r': '\r',
-            't': '\t',
-            'v': '\v',
-            '\\': '\\',
-        }.get(g)
-        
-    # Convert escape sequences
-    format = re.sub(RE_UNESCAPE, replace, args[0])
-    stdout.write(format % tuple(args[1:]))
-    return 0
-    
-#-------------------------------------------------------------------------------  
-# true utility
-#-------------------------------------------------------------------------------
-def utility_true(name, args, interp, env, stdin, stdout, stderr, debugflags):
-    if 'debug-utility' in debugflags:
-        print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-    return 0
-
-#-------------------------------------------------------------------------------  
-# sed utility
-#-------------------------------------------------------------------------------
-RE_SED = re.compile(r'^s(.).*\1[a-zA-Z]*$')
-
-# cygwin sed fails with some expressions when they do not end with a single space.
-# See unit tests for details. Interestingly, the same expressions work perfectly
-# in the cygwin shell.
-def utility_sed(name, args, interp, env, stdin, stdout, stderr, debugflags):
-    if 'debug-utility' in debugflags:
-        print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-        
-    # Scan pattern arguments and append a space if necessary
-    for i in range(len(args)):
-        if not RE_SED.search(args[i]):
-            continue
-        args[i] = args[i] + ' '
-
-    return run_command(name, args, interp, env, stdin, stdout, 
-        stderr, debugflags)
-
-#-------------------------------------------------------------------------------  
-# sleep utility
-#-------------------------------------------------------------------------------
-def utility_sleep(name, args, interp, env, stdin, stdout, stderr, debugflags):
-    if 'debug-utility' in debugflags:
-        print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-    time.sleep(int(args[0]))
-    return 0
-    
-#-------------------------------------------------------------------------------  
-# sort utility
-#-------------------------------------------------------------------------------
-OPT_SORT = NonExitingParser("sort - sort, merge, or sequence check text files")
-
-def utility_sort(name, args, interp, env, stdin, stdout, stderr, debugflags):
-
-    def sort(path):
-        if path == '-':
-            lines = stdin.readlines()
-        else:
-            try:
-                f = file(path)
-                try:
-                    lines = f.readlines()
-                finally:
-                    f.close()
-            except IOError as e:
-                stderr.write(str(e) + '\n')
-                return 1
-        
-        if lines and lines[-1][-1]!='\n':
-            lines[-1] = lines[-1] + '\n'
-        return lines
-    
-    if 'debug-utility' in debugflags:
-        print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-        
-    option, args = OPT_SORT.parse_args(args)
-    alllines = []
-    
-    if len(args)<=0:
-        args += ['-']
-        
-    # Load all files lines
-    curdir = os.getcwd()
-    try:
-        os.chdir(env['PWD'])
-        for path in args:
-            alllines += sort(path)
-    finally:
-        os.chdir(curdir)
-            
-    alllines.sort()
-    for line in alllines:
-        stdout.write(line)
-    return 0
-    
-#-------------------------------------------------------------------------------
-# hg utility
-#-------------------------------------------------------------------------------
-
-hgcommands = [
-    'add',
-    'addremove',
-    'commit', 'ci',
-    'debugrename',
-    'debugwalk',
-    'falabala', # Dummy command used in a mercurial test
-    'incoming',
-    'locate',
-    'pull',
-    'push',
-    'qinit',
-    'remove', 'rm',
-    'rename', 'mv',
-    'revert',    
-    'showconfig',
-    'status', 'st',
-    'strip',
-    ]
-
-def rewriteslashes(name, args):
-    # Several hg commands output file paths, rewrite the separators
-    if len(args) > 1 and name.lower().endswith('python') \
-       and args[0].endswith('hg'):
-        for cmd in hgcommands:
-            if cmd in args[1:]:
-                return True
-            
-    # svn output contains many paths with OS specific separators.
-    # Normalize these to unix paths.
-    base = os.path.basename(name)
-    if base.startswith('svn'):
-        return True
-    
-    return False
-
-def rewritehg(output):
-    if not output:
-        return output
-    # Rewrite os specific messages
-    output = output.replace(': The system cannot find the file specified',
-                            ': No such file or directory')
-    output = re.sub(': Access is denied.*$', ': Permission denied', output)
-    output = output.replace(': No connection could be made because the target machine actively refused it',
-                            ': Connection refused')
-    return output
-                            
-
-def run_command(name, args, interp, env, stdin, stdout,
-                stderr, debugflags):
-    # Execute the command
-    if 'debug-utility' in debugflags:
-        print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
-    hgbin = interp.options().hgbinary
-    ishg = hgbin and ('hg' in name or args and 'hg' in args[0])
-    unixoutput = 'cygwin' in name or ishg
-    
-    exec_env = env.get_variables()        
-    try:
-        # BUG: comparing file descriptors is clearly not a reliable way to tell
-        # whether they point at the same underlying object. But in pysh's limited
-        # scope this is usually right; we do not expect complicated redirections
-        # besides the usual 2>&1.
-        # Still, the one case we cannot deal with is when stdout and stderr are
-        # redirected *by the pysh caller*. This is the reason for the --redirect
-        # pysh() option.
-        # Now, we want to know whether they are the same because we sometimes need
-        # to transform the command output, mostly removing CR-LF to ensure that
-        # the command output is unix-like. Cygwin utilities are a special case
-        # because they explicitly set their output streams to binary mode, so we
-        # have nothing to do. For all other commands, we have to guess whether
-        # they are sending text data, in which case the transformation must be
-        # done. Again, the NUL character test is unreliable but should be enough
-        # for hg tests.
-        redirected = stdout.fileno()==stderr.fileno()
-        if not redirected:
-            p = subprocess.Popen([name] + args, cwd=env['PWD'], env=exec_env, 
-                    stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-        else:
-            p = subprocess.Popen([name] + args, cwd=env['PWD'], env=exec_env, 
-                    stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-        out, err = p.communicate()
-    except WindowsError as e:
-        raise UtilityError(str(e))
-
-    if not unixoutput:
-        def encode(s):
-            if '\0' in s:
-                return s
-            return s.replace('\r\n', '\n')
-    else:
-        encode = lambda s: s
-
-    if rewriteslashes(name, args):
-        encode1_ = encode
-        def encode(s):
-            s = encode1_(s)
-            s = s.replace('\\\\', '\\')
-            s = s.replace('\\', '/')
-            return s
-
-    if ishg:
-        encode2_ = encode
-        def encode(s):
-            return rewritehg(encode2_(s))
-    
-    stdout.write(encode(out))
-    if not redirected:
-        stderr.write(encode(err))
-    return p.returncode
-            

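Note: the removed builtin.py implemented a small set of POSIX-like utilities in pure Python, each following the same calling convention (name, args, interp, env, stdin, stdout, stderr, debugflags) and returning a shell-style exit status. The snippet below is a minimal standalone sketch of that convention, not bitbake or pysh code; the utility name and the driver values (the io.StringIO stream and the placeholder env dict) are assumptions made purely for illustration.

import io

def utility_echo(name, args, interp, env, stdin, stdout, stderr, debugflags):
    # Hypothetical utility following the convention above: write the arguments
    # to the supplied stdout wrapper and return a POSIX-style exit status.
    stdout.write(' '.join(args) + '\n')
    return 0

# Minimal driver: the real interpreter passes FileWrapper objects and an
# Environment instance; plain substitutes are enough to show the call shape.
out = io.StringIO()
status = utility_echo('echo', ['hello', 'world'], None, {'PWD': '/'}, None, out, None, [])
assert status == 0 and out.getvalue() == 'hello world\n'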
+ 0 - 1367
bitbake/lib/bb/pysh/interp.py

@@ -1,1367 +0,0 @@
-# interp.py - shell interpreter for pysh.
-#
-# Copyright 2007 Patrick Mezard
-#
-# This software may be used and distributed according to the terms
-# of the GNU General Public License, incorporated herein by reference.
-
-"""Implement the shell interpreter.
-
-Most references are made to "The Open Group Base Specifications Issue 6".
-<http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html>
-"""
-# TODO: document the fact that input streams must implement fileno() so Popen will work correctly.
-# It requires non-stdin streams to be implemented as files. Still to be tested...
-# DOC: pathsep is used in PATH instead of ':'. Clearly, there are path syntax issues here.
-# TODO: stop command execution upon error.
-# TODO: sort out the filename/io_number mess. It should be possible to use filenames only.
-# TODO: review subshell implementation
-# TODO: test environment cloning for non-special builtins
-# TODO: set -x should not rebuild commands from tokens, assignments/redirections are lost
-# TODO: unit test for variable assignment
-# TODO: test error management wrt error type/utility type
-# TODO: test for binary output everywhere
-# BUG: debug-parsing does not pass log file to PLY. Maybe a PLY upgrade is necessary.
-import base64
-import cPickle as pickle
-import errno
-import glob
-import os
-import re
-import subprocess
-import sys
-import tempfile
-
-try:
-    s = set()
-    del s
-except NameError:
-    from Set import Set as set
-
-import builtin
-from sherrors import *
-import pyshlex
-import pyshyacc
-
-def mappend(func, *args, **kargs):
-    """Like map but assume func returns a list. Returned lists are merged into
-    a single one.
-    """
-    return reduce(lambda a,b: a+b, map(func, *args, **kargs), [])
-
-class FileWrapper:
-    """File object wrapper to ease debugging.
-    
-    Allow mode checking and implement file duplication through a simple 
-    reference counting scheme. Not sure the latter is really useful since
-    only real file descriptors can be used.
-    """
-    def __init__(self, mode, file, close=True):
-        if mode not in ('r', 'w', 'a'):
-            raise IOError('invalid mode: %s' % mode)
-        self._mode = mode
-        self._close = close
-        if isinstance(file, FileWrapper):
-            if file._refcount[0] <= 0:
-                raise IOError(0, 'Error')
-            self._refcount = file._refcount
-            self._refcount[0] += 1
-            self._file = file._file
-        else:
-            self._refcount = [1]
-            self._file = file
-        
-    def dup(self):
-        return FileWrapper(self._mode, self, self._close)
-        
-    def fileno(self):
-        """fileno() should be only necessary for input streams."""
-        return self._file.fileno()
-        
-    def read(self, size=-1):
-        if self._mode!='r':
-            raise IOError(0, 'Error')
-        return self._file.read(size)
-        
-    def readlines(self, *args, **kwargs):
-        return self._file.readlines(*args, **kwargs)
-        
-    def write(self, s):
-        if self._mode not in ('w', 'a'):
-            raise IOError(0, 'Error')
-        return self._file.write(s)
-        
-    def flush(self):
-        self._file.flush()
-        
-    def close(self):        
-        if not self._refcount:
-            return
-        assert  self._refcount[0] > 0
-        
-        self._refcount[0] -= 1    
-        if self._refcount[0] == 0:
-            self._mode = 'c'
-            if self._close:
-                self._file.close()
-        self._refcount = None
-                
-    def mode(self):
-        return self._mode
-
-    def __getattr__(self, name):
-        if name == 'name':
-            self.name = getattr(self._file, name)
-            return self.name
-        else:
-            raise AttributeError(name)
-
-    def __del__(self):
-        self.close()
-               
-               
-def win32_open_devnull(mode):
-    return open('NUL', mode)
-    
-        
-class Redirections:
-    """Stores open files and their mapping to pseudo-sh file descriptor.
-    """
-    # BUG: redirections are not handled correctly: 1>&3 2>&3 3>&4 does 
-    # not make 1 to redirect to 4
-    def __init__(self, stdin=None, stdout=None, stderr=None):
-        self._descriptors = {}
-        if stdin is not None:
-            self._add_descriptor(0, stdin)
-        if stdout is not None:
-            self._add_descriptor(1, stdout)
-        if stderr is not None:
-            self._add_descriptor(2, stderr)
-            
-    def add_here_document(self, interp, name, content, io_number=None):
-        if io_number is None:
-            io_number = 0
-            
-        if name==pyshlex.unquote_wordtree(name):
-            content = interp.expand_here_document(('TOKEN', content))
-    
-        # Write document content in a temporary file
-        tmp = tempfile.TemporaryFile()
-        try:
-            tmp.write(content)
-            tmp.flush()
-            tmp.seek(0)
-            self._add_descriptor(io_number, FileWrapper('r', tmp))
-        except:
-            tmp.close()
-            raise                
-        
-    def add(self, interp, op, filename, io_number=None):
-        if op not in ('<', '>', '>|', '>>', '>&'):
-            # TODO: add descriptor duplication and here_documents      
-            raise RedirectionError('Unsupported redirection operator "%s"' % op)
-            
-        if io_number is not None:
-            io_number = int(io_number)
-            
-        if (op == '>&' and filename.isdigit()) or filename=='-':
-            # No expansion for file descriptors, quote them if you want a filename
-            fullname = filename
-        else:
-            if filename.startswith('/'):
-                # TODO: win32 kludge
-                if filename=='/dev/null':
-                    fullname = 'NUL'
-                else:
-                    # TODO: handle absolute pathnames, they are unlikely to exist on the
-                    # current platform (win32 for instance).
-                    raise NotImplementedError()
-            else:
-                fullname = interp.expand_redirection(('TOKEN', filename))
-                if not fullname:
-                    raise RedirectionError('%s: ambiguous redirect' % filename)
-                # Build absolute path based on PWD
-                fullname = os.path.join(interp.get_env()['PWD'], fullname)
-            
-        if op=='<':
-            return self._add_input_redirection(interp, fullname, io_number)
-        elif op in ('>', '>|'):
-            clobber = ('>|'==op)
-            return self._add_output_redirection(interp, fullname, io_number, clobber)
-        elif op=='>>':
-            return self._add_output_appending(interp, fullname, io_number)
-        elif op=='>&':
-            return self._dup_output_descriptor(fullname, io_number)
-        
-    def close(self):
-        if self._descriptors is not None:
-            for desc in self._descriptors.itervalues():
-                desc.flush()
-                desc.close()
-            self._descriptors = None
-            
-    def stdin(self):
-        return self._descriptors[0]
-          
-    def stdout(self):
-        return self._descriptors[1] 
-        
-    def stderr(self):
-        return self._descriptors[2] 
-            
-    def clone(self):
-        clone = Redirections()
-        for desc, fileobj in self._descriptors.iteritems():
-            clone._descriptors[desc] = fileobj.dup()
-        return clone
-           
-    def _add_output_redirection(self, interp, filename, io_number, clobber):    
-        if io_number is None:
-            # io_number defaults to standard output
-            io_number = 1
-        
-        if not clobber and interp.get_env().has_opt('-C') and os.path.isfile(filename):
-            # File already exists in no-clobber mode, bail out
-            raise RedirectionError('File "%s" already exists' % filename)
-            
-        # Open and register
-        self._add_file_descriptor(io_number, filename, 'w')
-        
-    def _add_output_appending(self, interp, filename, io_number):    
-        if io_number is None:
-            io_number = 1
-        self._add_file_descriptor(io_number, filename, 'a')
-            
-    def _add_input_redirection(self, interp, filename, io_number):
-        if io_number is None:
-            io_number = 0
-        self._add_file_descriptor(io_number, filename, 'r')
-        
-    def _add_file_descriptor(self, io_number, filename, mode):    
-        try:            
-            if filename.startswith('/'):
-                if filename=='/dev/null':
-                    f = win32_open_devnull(mode+'b')
-                else:
-                    # TODO: handle absolute pathnames, they are unlikely to exist on the
-                    # current platform (win32 for instance).
-                    raise NotImplementedError('cannot open absolute path %s' % repr(filename))
-            else:
-                f = file(filename, mode+'b')
-        except IOError as e:
-            raise RedirectionError(str(e))
-            
-        wrapper = None
-        try:
-            wrapper = FileWrapper(mode, f)
-            f = None
-            self._add_descriptor(io_number, wrapper)
-        except:
-            if f: f.close()
-            if wrapper: wrapper.close()
-            raise
-            
-    def _dup_output_descriptor(self, source_fd, dest_fd):
-        if source_fd is None:
-            source_fd = 1
-        self._dup_file_descriptor(source_fd, dest_fd, 'w')
-            
-    def _dup_file_descriptor(self, source_fd, dest_fd, mode):
-        source_fd = int(source_fd)
-        if source_fd not in self._descriptors:
-            raise RedirectionError('"%s" is not a valid file descriptor' % str(source_fd))
-        source = self._descriptors[source_fd]
-        
-        if source.mode()!=mode:
-            raise RedirectionError('Descriptor %s cannot be duplicated in mode "%s"' % (str(source), mode))
-        
-        if dest_fd=='-':
-            # Close the source descriptor
-            del self._descriptors[source_fd]
-            source.close()
-        else:
-            dest_fd = int(dest_fd)
-            if dest_fd not in self._descriptors:
-                raise RedirectionError('Cannot replace file descriptor %s' % str(dest_fd))
-                
-            dest = self._descriptors[dest_fd]
-            if dest.mode()!=mode:
-                raise RedirectionError('Descriptor %s cannot be redirected in mode "%s"' % (str(dest), mode))
-            
-            self._descriptors[dest_fd] = source.dup()
-            dest.close()        
-            
-    def _add_descriptor(self, io_number, file):
-        io_number = int(io_number)
-        
-        if io_number in self._descriptors:
-            # Close the current descriptor
-            d = self._descriptors[io_number]
-            del self._descriptors[io_number]
-            d.close()
-            
-        self._descriptors[io_number] = file
-
-    def __str__(self):
-        names = [('%d=%r' % (k, getattr(v, 'name', None))) for k,v
-                 in self._descriptors.iteritems()]
-        names = ','.join(names)
-        return 'Redirections(%s)' % names
-
-    def __del__(self):
-        self.close()
-    
-def cygwin_to_windows_path(path):
-    """Turn /cygdrive/c/foo into c:/foo, or return path if it
-    is not a cygwin path.
-    """
-    if not path.startswith('/cygdrive/'):
-        return path
-    path = path[len('/cygdrive/'):]
-    path = path[:1] + ':' + path[1:]
-    return path
-    
-def win32_to_unix_path(path):
-    if path is not None:
-        path = path.replace('\\', '/')
-    return path    
-    
-_RE_SHEBANG = re.compile(r'^\#!\s?([^\s]+)(?:\s([^\s]+))?')
-_SHEBANG_CMDS = {
-    '/usr/bin/env': 'env',
-    '/bin/sh': 'pysh',
-    'python': 'python',
-}
-    
-def resolve_shebang(path, ignoreshell=False):
-    """Return a list of arguments as shebang interpreter call or an empty list
-    if path does not refer to an executable script.
-    See <http://www.opengroup.org/austin/docs/austin_51r2.txt>.
-    
-    ignoreshell - set to True to ignore sh shebangs. Return an empty list instead.
-    """
-    try:
-        f = file(path)
-        try:
-            # At most 80 characters in the first line
-            header = f.read(80).splitlines()[0]
-        finally:
-            f.close()
-            
-        m = _RE_SHEBANG.search(header)
-        if not m:
-            return []
-        cmd, arg = m.group(1,2)
-        if os.path.isfile(cmd):
-            # Keep this one, the hg script for instance contains a weird windows
-            # shebang referencing the current python install.
-            cmdfile = os.path.basename(cmd).lower()
-            if cmdfile == 'python.exe':
-                cmd = 'python'
-            pass
-        elif cmd not in _SHEBANG_CMDS:
-            raise CommandNotFound('Unknown interpreter "%s" referenced in '\
-                'shebang' % header)
-        cmd = _SHEBANG_CMDS.get(cmd)
-        if cmd is None or (ignoreshell and cmd == 'pysh'):
-            return []
-        if arg is None:
-            return [cmd, win32_to_unix_path(path)]
-        return [cmd, arg, win32_to_unix_path(path)]
-    except IOError as e:
-        if  e.errno!=errno.ENOENT and \
-            (e.errno!=errno.EPERM and not os.path.isdir(path)): # Opening a directory raises EPERM
-            raise
-        return []
-        
-def win32_find_in_path(name, path):
-    if isinstance(path, str):
-        path = path.split(os.pathsep)
-        
-    exts = os.environ.get('PATHEXT', '').lower().split(os.pathsep)
-    for p in path:
-        p_name = os.path.join(p, name)
-        
-        prefix = resolve_shebang(p_name)
-        if prefix:
-            return prefix
-            
-        for ext in exts:    
-            p_name_ext = p_name + ext
-            if os.path.exists(p_name_ext):
-                return [win32_to_unix_path(p_name_ext)]
-    return []
-
-class Traps(dict):
-    def __setitem__(self, key, value):
-        if key not in ('EXIT',):
-            raise NotImplementedError()
-        super(Traps, self).__setitem__(key, value)
-
-# IFS white spaces character class
-_IFS_WHITESPACES = (' ', '\t', '\n')
-
-class Environment:
-    """Environment holds environment variables, export table, function 
-    definitions and whatever is defined in 2.12 "Shell Execution Environment",
-    redirection excepted.
-    """
-    def __init__(self, pwd):
-        self._opt = set()       #Shell options
-        
-        self._functions = {}        
-        self._env = {'?': '0', '#': '0'}
-        self._exported = set([
-            'HOME', 'IFS', 'PATH'
-        ])
-        
-        # Set environment vars with side-effects
-        self._ifs_ws = None     # Set of IFS whitespace characters
-        self._ifs_re = None     # Regular expression used to split between words using IFS classes
-        self['IFS'] = ''.join(_IFS_WHITESPACES) #Default environment values
-        self['PWD'] = pwd
-        self.traps = Traps()
-        
-    def clone(self, subshell=False):
-        env = Environment(self['PWD'])
-        env._opt = set(self._opt)
-        for k,v in self.get_variables().iteritems():
-            if k in self._exported:
-                env.export(k,v)
-            elif subshell:
-                env[k] = v
-                
-        if subshell:
-            env._functions = dict(self._functions)
-            
-        return env        
-        
-    def __getitem__(self, key):
-        if key in ('@', '*', '-', '$'):
-            raise NotImplementedError('%s is not implemented' % repr(key))
-        return self._env[key]
-        
-    def get(self, key, defval=None):
-        try:
-            return self[key]
-        except KeyError:
-            return defval
-        
-    def __setitem__(self, key, value):
-        if key=='IFS':
-            # Update the whitespace/non-whitespace classes
-            self._update_ifs(value)
-        elif key=='PWD':
-            pwd = os.path.abspath(value)
-            if not os.path.isdir(pwd):
-                raise VarAssignmentError('Invalid directory %s' % value)
-            value = pwd
-        elif key in ('?', '!'):
-            value = str(int(value))
-        self._env[key] = value
-        
-    def __delitem__(self, key):
-        if key in ('IFS', 'PWD', '?'):
-            raise VarAssignmentError('%s cannot be unset' % key)
-        del self._env[key]
-
-    def __contains__(self, item):
-        return item in self._env
-        
-    def set_positional_args(self, args):
-        """Set the content of 'args' as positional argument from 1 to len(args).
-        Return previous argument as a list of strings.
-        """
-        # Save and remove previous arguments
-        prevargs = []        
-        for i in range(int(self._env['#'])):
-            i = str(i+1)
-            prevargs.append(self._env[i])
-            del self._env[i]
-        self._env['#'] = '0'
-                
-        #Set new ones
-        for i,arg in enumerate(args):
-            self._env[str(i+1)] = str(arg)
-        self._env['#'] = str(len(args))
-        
-        return prevargs
-        
-    def get_positional_args(self):
-        return [self._env[str(i+1)] for i in range(int(self._env['#']))]
-        
-    def get_variables(self):
-        return dict(self._env)
-        
-    def export(self, key, value=None):
-        if value is not None:
-            self[key] = value
-        self._exported.add(key)
-        
-    def get_exported(self):
-        return [(k,self._env.get(k)) for k in self._exported]
-            
-    def split_fields(self, word):
-        if not self._ifs_ws or not word:
-            return [word]
-        return re.split(self._ifs_re, word)
-   
-    def _update_ifs(self, value):
-        """Update the split_fields related variables when IFS character set is
-        changed.
-        """
-        # TODO: handle NULL IFS
-        
-        # Separate characters in whitespace and non-whitespace
-        chars = set(value)
-        ws = [c for c in chars if c in _IFS_WHITESPACES]
-        nws = [c for c in chars if c not in _IFS_WHITESPACES]
-        
-        # Keep whitespaces in a string for left and right stripping
-        self._ifs_ws = ''.join(ws)
-        
-        # Build a regexp to split fields
-        trailing = '[' + ''.join([re.escape(c) for c in ws]) + ']'
-        if nws:
-            # First, the single non-whitespace occurrence.
-            nws = '[' + ''.join([re.escape(c) for c in nws]) + ']'
-            nws = '(?:' + trailing + '*' + nws + trailing + '*' + '|' + trailing + '+)'
-        else:
-            # Then mix all parts with quantifiers
-            nws = trailing + '+'
-        self._ifs_re = re.compile(nws)
-       
-    def has_opt(self, opt, val=None):
-        return (opt, val) in self._opt
-        
-    def set_opt(self, opt, val=None):
-        self._opt.add((opt, val))
-        
-    def find_in_path(self, name, pwd=False):
-        path = self._env.get('PATH', '').split(os.pathsep)
-        if pwd:
-            path[:0] = [self['PWD']]
-        if os.name == 'nt':
-            return win32_find_in_path(name, self._env.get('PATH', ''))
-        else:
-            raise NotImplementedError()
-            
-    def define_function(self, name, body):
-        if not is_name(name):
-            raise ShellSyntaxError('%s is not a valid function name' % repr(name))
-        self._functions[name] = body
-        
-    def remove_function(self, name):
-        del self._functions[name]
-        
-    def is_function(self, name):
-        return name in self._functions
-        
-    def get_function(self, name):
-        return self._functions.get(name)
-        
-       
-name_charset = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_'
-name_charset = dict(zip(name_charset,name_charset))
-           
-def match_name(s):
-    """Return the length in characters of the longest prefix made of name
-    allowed characters in s.
-    """
-    for i,c in enumerate(s):
-        if c not in name_charset:
-            return s[:i]
-    return s
-    
-def is_name(s):
-    return len([c for c in s if c not in name_charset])<=0
-    
-def is_special_param(c):
-    return len(c)==1 and c in ('@','*','#','?','-','$','!','0')
-    
-def utility_not_implemented(name, *args, **kwargs):
-    raise NotImplementedError('%s utility is not implemented' % name)
-    
-
-class Utility:
-    """Define utilities properties:
-    func -- utility callable. See builtin module for utility samples.
-    is_special -- see XCU 2.8.
-    """
-    def __init__(self, func, is_special=0):
-        self.func = func
-        self.is_special = bool(is_special)
-
-
-def encodeargs(args):
-    def encodearg(s):
-        lines = base64.encodestring(s)
-        lines = [l.splitlines()[0] for l in lines]
-        return ''.join(lines)
-
-    s = pickle.dumps(args)
-    return encodearg(s)
-
-def decodeargs(s):
-    s = base64.decodestring(s)
-    return pickle.loads(s)
-    
-
-class GlobError(Exception):
-    pass
-
-class Options:
-    def __init__(self):
-        # True if Mercurial operates with binary streams
-        self.hgbinary = True
-
-class Interpreter:
-    # Implementation is very basic: the execute() method just makes a DFS on the
-    # AST and executes nodes one by one. Nodes are (name, obj) tuples where name
-    # is a string identifier and obj the AST element returned by the parser.
-    #
-    # Handlers are named after the node identifiers.
-    # TODO: check node names and remove the switch in execute with some
-    # dynamic getattr() call to find node handlers.
-    """Shell interpreter.
-    
-    The following debugging flags can be passed:
-    debug-parsing - enable PLY debugging.
-    debug-tree - print the generated AST.
-    debug-cmd - trace command execution before word expansion, plus exit status.
-    debug-utility - trace utility execution.
-    """
-    
-    # List supported commands.
-    COMMANDS = {
-        'cat':       Utility(builtin.utility_cat,),
-        'cd':       Utility(builtin.utility_cd,),
-        ':':        Utility(builtin.utility_colon,),
-        'echo':     Utility(builtin.utility_echo),
-        'env':      Utility(builtin.utility_env),
-        'exit':     Utility(builtin.utility_exit),
-        'export':   Utility(builtin.builtin_export,     is_special=1),
-        'egrep':    Utility(builtin.utility_egrep),
-        'fgrep':    Utility(builtin.utility_fgrep),
-        'gunzip':   Utility(builtin.utility_gunzip),
-        'kill':     Utility(builtin.utility_kill),
-        'mkdir':    Utility(builtin.utility_mkdir),
-        'netstat':  Utility(builtin.utility_netstat),
-        'printf':   Utility(builtin.utility_printf),
-        'pwd':      Utility(builtin.utility_pwd),
-        'return':   Utility(builtin.builtin_return,     is_special=1),
-        'sed':      Utility(builtin.utility_sed,),
-        'set':      Utility(builtin.builtin_set,),
-        'shift':    Utility(builtin.builtin_shift,),
-        'sleep':    Utility(builtin.utility_sleep,),
-        'sort':     Utility(builtin.utility_sort,),
-        'trap':     Utility(builtin.builtin_trap,       is_special=1),
-        'true':     Utility(builtin.utility_true),
-        'unset':    Utility(builtin.builtin_unset,      is_special=1),
-        'wait':     Utility(builtin.builtin_wait,       is_special=1),
-    }
-    
-    def __init__(self, pwd, debugflags = [], env=None, redirs=None, stdin=None,
-                 stdout=None, stderr=None, opts=Options()):
-        self._env = env
-        if self._env is None:
-            self._env = Environment(pwd)
-        self._children = {}
-            
-        self._redirs = redirs
-        self._close_redirs = False
-        
-        if self._redirs is None:
-            if stdin is None:
-                stdin = sys.stdin
-            if stdout is None:
-                stdout = sys.stdout
-            if stderr is None:
-                stderr = sys.stderr
-            stdin = FileWrapper('r', stdin, False)
-            stdout = FileWrapper('w', stdout, False)
-            stderr = FileWrapper('w', stderr, False)
-            self._redirs = Redirections(stdin, stdout, stderr)
-            self._close_redirs = True
-            
-        self._debugflags = list(debugflags)
-        self._logfile = sys.stderr
-        self._options = opts
-        
-    def close(self):
-        """Must be called when the interpreter is no longer used."""
-        script = self._env.traps.get('EXIT')
-        if script:
-            try:
-                self.execute_script(script=script)
-            except:
-                pass
-
-        if self._redirs is not None and self._close_redirs:
-            self._redirs.close()
-            self._redirs = None
-            
-    def log(self, s):
-        self._logfile.write(s)
-        self._logfile.flush()
-            
-    def __getitem__(self, key):
-        return self._env[key]
-        
-    def __setitem__(self, key, value):
-        self._env[key] = value
-
-    def options(self):
-        return self._options
-
-    def redirect(self, redirs, ios):
-        def add_redir(io):
-            if isinstance(io, pyshyacc.IORedirect):
-                redirs.add(self, io.op, io.filename, io.io_number)
-            else:
-                redirs.add_here_document(self, io.name, io.content, io.io_number)
-                    
-        map(add_redir, ios)
-        return redirs
-            
-    def execute_script(self, script=None, ast=None, sourced=False,
-                       scriptpath=None):
-        """If script is not None, parse the input. Otherwise takes the supplied
-        AST. Then execute the AST.
-        Return the script exit status.
-        """
-        try:
-            if scriptpath is not None:
-                self._env['0'] = os.path.abspath(scriptpath)
-
-            if script is not None:
-                debug_parsing = ('debug-parsing' in self._debugflags)    
-                cmds, script = pyshyacc.parse(script, True, debug_parsing)
-                if 'debug-tree' in self._debugflags:
-                    pyshyacc.print_commands(cmds, self._logfile)
-                    self._logfile.flush()
-            else:
-                cmds, script = ast, ''                
-                
-            status = 0
-            for cmd in cmds:
-                try:
-                    status = self.execute(cmd)
-                except ExitSignal as e:
-                    if sourced:
-                        raise
-                    status = int(e.args[0])
-                    return status
-                except ShellError:
-                    self._env['?'] = 1
-                    raise
-                if 'debug-utility' in self._debugflags or 'debug-cmd' in self._debugflags:
-                    self.log('returncode ' + str(status)+ '\n')
-            return status
-        except CommandNotFound as e:
-            print >>self._redirs.stderr, str(e)
-            self._redirs.stderr.flush()
-            # Command not found by non-interactive shell
-            # return 127
-            raise
-        except RedirectionError as e:
-            # TODO: should be handled depending on the utility status
-            print >>self._redirs.stderr, str(e)
-            self._redirs.stderr.flush()
-            # Command not found by non-interactive shell
-            # return 127
-            raise
-
-    def dotcommand(self, env, args):
-        if len(args) < 1:
-            raise ShellError('. expects at least one argument')
-        path = args[0]
-        if '/' not in path:
-            found = env.find_in_path(args[0], True)
-            if found:
-                path = found[0]
-        script = file(path).read()
-        return self.execute_script(script=script, sourced=True)
-
-    def execute(self, token, redirs=None):
-        """Execute and AST subtree with supplied redirections overriding default
-        interpreter ones.
-        Return the exit status.
-        """
-        if not token:
-            return 0
-            
-        if redirs is None:
-            redirs = self._redirs
-            
-        if isinstance(token, list):
-            # Commands sequence
-            res = 0
-            for t in token:
-                res = self.execute(t, redirs)
-            return res
-
-        type, value = token
-        status = 0
-        if type=='simple_command':
-            redirs_copy = redirs.clone()
-            try:
-                # TODO: define and handle command return values
-                # TODO: implement set -e
-                status = self._execute_simple_command(value, redirs_copy)
-            finally:
-                redirs_copy.close()
-        elif type=='pipeline':
-            status = self._execute_pipeline(value, redirs)
-        elif type=='and_or':
-            status = self._execute_and_or(value, redirs)
-        elif type=='for_clause':
-            status = self._execute_for_clause(value, redirs)
-        elif type=='while_clause':
-            status = self._execute_while_clause(value, redirs)
-        elif type=='function_definition':
-            status = self._execute_function_definition(value, redirs)
-        elif type=='brace_group':
-            status = self._execute_brace_group(value, redirs)
-        elif type=='if_clause':
-            status = self._execute_if_clause(value, redirs)
-        elif type=='subshell':
-            status = self.subshell(ast=value.cmds, redirs=redirs)
-        elif type=='async':
-            status = self._asynclist(value)
-        elif type=='redirect_list':
-            redirs_copy = self.redirect(redirs.clone(), value.redirs)
-            try:
-                status = self.execute(value.cmd, redirs_copy)
-            finally:
-                redirs_copy.close()
-        else:
-            raise NotImplementedError('Unsupported token type ' + type)
-
-        if status < 0:
-            status = 255
-        return status
-            
-    def _execute_if_clause(self, if_clause, redirs):
-        cond_status = self.execute(if_clause.cond, redirs)
-        if cond_status==0:
-            return self.execute(if_clause.if_cmds, redirs)
-        else:
-            return self.execute(if_clause.else_cmds, redirs)
-      
-    def _execute_brace_group(self, group, redirs):
-        status = 0
-        for cmd in group.cmds:
-            status = self.execute(cmd, redirs)
-        return status
-            
-    def _execute_function_definition(self, fundef, redirs):
-        self._env.define_function(fundef.name, fundef.body)
-        return 0
-            
-    def _execute_while_clause(self, while_clause, redirs):
-        status = 0
-        while 1:
-            cond_status = 0
-            for cond in while_clause.condition:
-                cond_status = self.execute(cond, redirs)
-                
-            if cond_status:
-                break
-                
-            for cmd in while_clause.cmds:
-                status = self.execute(cmd, redirs)
-                
-        return status
-            
-    def _execute_for_clause(self, for_clause, redirs):
-        if not is_name(for_clause.name):
-            raise ShellSyntaxError('%s is not a valid name' % repr(for_clause.name))
-        items = mappend(self.expand_token, for_clause.items)
-        
-        status = 0    
-        for item in items:
-            self._env[for_clause.name] = item
-            for cmd in for_clause.cmds:
-                status = self.execute(cmd, redirs)
-        return status
-            
-    def _execute_and_or(self, or_and, redirs):
-        res = self.execute(or_and.left, redirs)        
-        if (or_and.op=='&&' and res==0) or (or_and.op!='&&' and res!=0):
-            res = self.execute(or_and.right, redirs)
-        return res
-            
-    def _execute_pipeline(self, pipeline, redirs):            
-        if len(pipeline.commands)==1:
-            status = self.execute(pipeline.commands[0], redirs)
-        else:
-            # Execute all commands one after the other
-            status = 0
-            inpath, outpath = None, None
-            try:
-                # Command inputs and outputs cannot really be plugged together as
-                # done by a real shell. Run commands sequentially and chain their
-                # input/output through temporary files.
-                tmpfd, inpath = tempfile.mkstemp()
-                os.close(tmpfd)
-                tmpfd, outpath = tempfile.mkstemp()
-                os.close(tmpfd)
-                
-                inpath = win32_to_unix_path(inpath)
-                outpath = win32_to_unix_path(outpath)
-                
-                for i, cmd in enumerate(pipeline.commands):
-                    call_redirs = redirs.clone()
-                    try:
-                        if i!=0:
-                            call_redirs.add(self, '<', inpath)
-                        if i!=len(pipeline.commands)-1:
-                            call_redirs.add(self, '>', outpath)
-                        
-                        status = self.execute(cmd, call_redirs)
-                        
-                        # Chain inputs/outputs
-                        inpath, outpath = outpath, inpath
-                    finally:
-                        call_redirs.close()            
-            finally:
-                if inpath: os.remove(inpath)
-                if outpath: os.remove(outpath)
-        
-        if pipeline.reverse_status:
-            status = int(not status)
-        self._env['?'] = status
-        return status
-        
-    def _execute_function(self, name, args, interp, env, stdin, stdout, stderr, *others):
-        assert interp is self
-        
-        func = env.get_function(name)
-        #Set positional parameters
-        prevargs = None
-        try:
-            prevargs = env.set_positional_args(args)
-            try:
-                redirs = Redirections(stdin.dup(), stdout.dup(), stderr.dup())
-                try:
-                    status = self.execute(func, redirs)
-                finally:
-                    redirs.close()
-            except ReturnSignal as e:
-                status = int(e.args[0])
-                env['?'] = status
-            return status
-        finally:
-            #Reset positional parameters
-            if prevargs is not None:
-                env.set_positional_args(prevargs)
-                
-    def _execute_simple_command(self, token, redirs):
-        """Can raise ReturnSignal when return builtin is called, ExitSignal when
-        exit is called, and other shell exceptions upon builtin failures.
-        """
-        debug_command = 'debug-cmd' in self._debugflags
-        if debug_command:
-            self.log('word' + repr(token.words) + '\n')
-            self.log('assigns' + repr(token.assigns) + '\n')
-            self.log('redirs' + repr(token.redirs) + '\n')
-        
-        is_special = None
-        env = self._env
-        
-        try:
-            # Word expansion
-            args = []
-            for word in token.words:                
-                args += self.expand_token(word)
-                if is_special is None and args:
-                    is_special = env.is_function(args[0]) or \
-                        (args[0] in self.COMMANDS and self.COMMANDS[args[0]].is_special)
-                        
-            if debug_command:
-                self.log('_execute_simple_command' + str(args) + '\n')
-                
-            if not args:
-                # Redirections happen in a subshell
-                redirs = redirs.clone()
-            elif not is_special:
-                env = self._env.clone()
-            
-            # Redirections
-            self.redirect(redirs, token.redirs)
-                
-            # Variables assignments
-            res = 0
-            for type,(k,v) in token.assigns:
-                status, expanded = self.expand_variable((k,v))
-                if status is not None:
-                    res = status
-                if args:
-                    env.export(k, expanded)
-                else:
-                    env[k] = expanded
-                
-            if args and args[0] in ('.', 'source'):
-                res = self.dotcommand(env, args[1:])
-            elif args:
-                if args[0] in self.COMMANDS:
-                    command = self.COMMANDS[args[0]]
-                elif env.is_function(args[0]):
-                    command = Utility(self._execute_function, is_special=True)
-                else:
-                    if not '/' in args[0].replace('\\', '/'):
-                        cmd = env.find_in_path(args[0])
-                        if not cmd:
-                            # TODO: test error code on unknown command => 127
-                            raise CommandNotFound('Unknown command: "%s"' % args[0])
-                    else:
-                        # Handle commands like '/cygdrive/c/foo.bat'
-                        cmd = cygwin_to_windows_path(args[0])
-                        if not os.path.exists(cmd):
-                            raise CommandNotFound('%s: No such file or directory' % args[0])
-                        shebang = resolve_shebang(cmd)
-                        if shebang:
-                            cmd = shebang
-                        else:
-                            cmd = [cmd]
-                    args[0:1] = cmd
-                    command = Utility(builtin.run_command)
-                
-                # Command execution
-                if 'debug-cmd' in self._debugflags:
-                    self.log('redirections ' + str(redirs) + '\n')
-                    
-                res = command.func(args[0], args[1:], self, env,
-                                   redirs.stdin(), redirs.stdout(), 
-                                   redirs.stderr(), self._debugflags)
-            
-            if self._env.has_opt('-x'):
-                # Trace command execution in shell environment
-                # BUG: would be hard to reproduce a real shell behaviour since
-                # the AST is not annotated with source lines/tokens.
-                self._redirs.stdout().write(' '.join(args))
-                
-        except ReturnSignal:
-            raise
-        except ShellError as e:
-            if is_special or isinstance(e, (ExitSignal,
-                                            ShellSyntaxError, ExpansionError)):
-                raise e
-            self._redirs.stderr().write(str(e)+'\n')
-            return 1
-
-        return res
-
-    def expand_token(self, word):
-        """Expand a word as specified in [2.6 Word Expansions]. Return the list
-        of expanded words.
-        """
-        status, wtrees = self._expand_word(word)
-        return map(pyshlex.wordtree_as_string, wtrees)
-        
-    def expand_variable(self, word):
-        """Return a status code (or None if no command expansion occurred)
-        and a single word.
-        """
-        status, wtrees = self._expand_word(word, pathname=False, split=False)
-        words = map(pyshlex.wordtree_as_string, wtrees)
-        assert len(words)==1
-        return status, words[0]
-        
-    def expand_here_document(self, word):
-        """Return the expanded document as a single word. The here document is 
-        assumed to be unquoted.
-        """
-        status, wtrees = self._expand_word(word, pathname=False,
-                                           split=False, here_document=True)
-        words = map(pyshlex.wordtree_as_string, wtrees)
-        assert len(words)==1
-        return words[0]
-        
-    def expand_redirection(self, word):
-        """Return a single word."""
-        return self.expand_variable(word)[1]
-        
-    def get_env(self):
-        return self._env
-        
-    def _expand_word(self, token, pathname=True, split=True, here_document=False):
-        wtree = pyshlex.make_wordtree(token[1], here_document=here_document)
-        
-        # TODO: implement tilde expansion
-        def expand(wtree):
-            """Return a pseudo wordtree: the tree or its subelements can be empty
-            lists when no value result from the expansion.
-            """
-            status = None
-            for part in wtree:
-                if not isinstance(part, list):
-                    continue
-                if part[0]in ("'", '\\'):
-                    continue
-                elif part[0] in ('`', '$('):
-                    status, result = self._expand_command(part)
-                    part[:] = result
-                elif part[0] in ('$', '${'):
-                    part[:] = self._expand_parameter(part, wtree[0]=='"', split)
-                elif part[0] in ('', '"'):
-                    status, result = expand(part)
-                    part[:] = result
-                else:
-                    raise NotImplementedError('%s expansion is not implemented'
-                                              % part[0])
-            # [] is returned when an expansion results in no field,
-            # like an empty $@
-            wtree = [p for p in wtree if p != []]
-            if len(wtree) < 3:
-                return status, []
-            return status, wtree
-        
-        status, wtree = expand(wtree)
-        if len(wtree) == 0:
-            return status, wtree
-        wtree = pyshlex.normalize_wordtree(wtree)
-        
-        if split:
-            wtrees = self._split_fields(wtree)
-        else:
-            wtrees = [wtree]
-        
-        if pathname:
-            wtrees = mappend(self._expand_pathname, wtrees)
-        
-        wtrees = map(self._remove_quotes, wtrees)
-        return status, wtrees
-        
-    def _expand_command(self, wtree):
-        # BUG: there is something to do with backslashes and quoted
-        # characters here
-        command = pyshlex.wordtree_as_string(wtree[1:-1])
-        status, output = self.subshell_output(command)
-        return status, ['', output, '']
-        
-    def _expand_parameter(self, wtree, quoted=False, split=False):
-        """Return a valid wtree or an empty list when no parameter results."""
-        # Get the parameter name
-        # TODO: implement weird expansion rules with ':'
-        name = pyshlex.wordtree_as_string(wtree[1:-1])
-        if not is_name(name) and not is_special_param(name):
-            raise ExpansionError('Bad substitution "%s"' % name)
-        # TODO: implement special parameters
-        if name in ('@', '*'):
-            args = self._env.get_positional_args()
-            if len(args) == 0:
-                return []
-            if len(args)<2:
-                return ['', ''.join(args), '']
-                
-            sep = self._env.get('IFS', '')[:1]
-            if split and quoted and name=='@':
-                # Introduce a new token to tell the caller that these parameters
-                # cause a split as specified in 2.5.2
-                return ['@'] + args + ['']
-            else:
-                return ['', sep.join(args), '']                
-        
-        return ['', self._env.get(name, ''), '']
-        
-    def _split_fields(self, wtree):
-        def is_empty(split):
-            return split==['', '', '']
-            
-        def split_positional(quoted):
-            # Return a list of wtrees split according to positional parameter rules.
-            # All remaining '@' groups are removed.
-            assert quoted[0]=='"'
-            
-            splits = [[]]
-            for part in quoted:
-                if not isinstance(part, list) or part[0]!='@':
-                    splits[-1].append(part)
-                else:
-                    # Empty or single argument lists were dealt with already
-                    assert len(part)>3
-                    # First argument must join with the beginning part of the original word
-                    splits[-1].append(part[1])
-                    # Create double-quotes expressions for every argument after the first
-                    for arg in part[2:-1]:
-                        splits[-1].append('"')
-                        splits.append(['"', arg])
-            return splits
-        
-        # At this point, all expansions but pathnames have occurred. Only quoted
-        # and positional sequences remain. Thus, all candidates for field splitting 
-        # are in the tree root, or are positional splits ('@') and lie in root
-        # children.
-        if not wtree or wtree[0] not in ('', '"'):
-            # The whole token is quoted or empty, nothing to split
-            return [wtree]
-            
-        if wtree[0]=='"':
-            wtree = ['', wtree, '']
-        
-        result = [['', '']]
-        for part in wtree[1:-1]:
-            if isinstance(part, list):
-                if part[0]=='"':
-                    splits = split_positional(part)
-                    if len(splits)<=1:
-                        result[-1] += [part, '']
-                    else:
-                        # Terminate the current split
-                        result[-1] += [splits[0], '']
-                        result += splits[1:-1]
-                        # Create a new split
-                        result += [['', splits[-1], '']]
-                else:
-                    result[-1] += [part, '']
-            else:
-                splits = self._env.split_fields(part)
-                if len(splits)<=1:
-                    # No split
-                    result[-1][-1] += part
-                else:
-                    # Terminate the current resulting part and create a new one
-                    result[-1][-1] += splits[0]
-                    result[-1].append('')
-                    result += [['', r, ''] for r in splits[1:-1]]
-                    result += [['', splits[-1]]]
-        result[-1].append('')
-        
-        # Leading and trailing empty groups come from leading/trailing blanks
-        if result and is_empty(result[-1]):
-            result[-1:] = []
-        if result and is_empty(result[0]):
-            result[:1] = []
-        return result
-        
-    def _expand_pathname(self, wtree):
-        """See [2.6.6 Pathname Expansion]."""
-        if self._env.has_opt('-f'):
-            return [wtree]
-        
-        # All expansions have been performed; only quoted sequences should remain
-        # in the tree. Generate the pattern by folding the tree, escaping special
-        # characters when they appear quoted
-        special_chars = '*?[]'
-        
-        def make_pattern(wtree):
-            subpattern = []
-            for part in wtree[1:-1]:
-                if isinstance(part, list):
-                    part = make_pattern(part)
-                elif wtree[0]!='':
-                    for c in part:
-                        # Meta-characters cannot be quoted
-                        if c in special_chars:
-                            raise GlobError()
-                subpattern.append(part)
-            return ''.join(subpattern)
-            
-        def pwd_glob(pattern):
-            cwd = os.getcwd()
-            os.chdir(self._env['PWD'])
-            try:
-                return glob.glob(pattern) 
-            finally:
-                os.chdir(cwd)    
-            
-        #TODO: check working directory issues here wrt relative patterns
-        try:
-            pattern = make_pattern(wtree)
-            paths = pwd_glob(pattern)
-        except GlobError:
-            # BUG: Meta-characters were found in quoted sequences. They should
-            # have been used literally but this is unsupported in the current glob
-            # module. Instead we consider the whole tree must be used literally and
-            # therefore there is no point in globbing. This is wrong when meta
-            # characters are mixed with quoted meta in the same pattern like:
-            # < foo*"py*" >
-            paths = []
-            
-        if not paths:
-            return [wtree]
-        return [['', path, ''] for path in paths]
-        
-    def _remove_quotes(self, wtree):
-        """See [2.6.7 Quote Removal]."""
-        
-        def unquote(wtree):
-            unquoted = []
-            for part in wtree[1:-1]:
-                if isinstance(part, list):
-                    part = unquote(part)
-                unquoted.append(part)
-            return ''.join(unquoted)
-            
-        return ['', unquote(wtree), '']
-        
-    def subshell(self, script=None, ast=None, redirs=None):
-        """Execute the script or AST in a subshell, with inherited redirections
-        if redirs is not None.
-        """
-        if redirs:
-            sub_redirs = redirs
-        else:
-            sub_redirs = self._redirs.clone()
-        
-        subshell = None    
-        try:
-            subshell = Interpreter(None, self._debugflags, self._env.clone(True),
-                                   sub_redirs, opts=self._options)
-            return subshell.execute_script(script, ast)
-        finally:
-            if not redirs: sub_redirs.close()
-            if subshell: subshell.close()
-        
-    def subshell_output(self, script):
-        """Execute the script in a subshell and return the captured output."""        
-        # Create temporary file to capture subshell output
-        tmpfd, tmppath = tempfile.mkstemp()
-        try:
-            tmpfile = os.fdopen(tmpfd, 'wb')
-            stdout = FileWrapper('w', tmpfile)
-            
-            redirs = Redirections(self._redirs.stdin().dup(),
-                                  stdout,
-                                  self._redirs.stderr().dup())            
-            try:
-                status = self.subshell(script=script, redirs=redirs)
-            finally:
-                redirs.close()
-                redirs = None
-            
-            # Extract subshell standard output
-            tmpfile = open(tmppath, 'rb')
-            try:
-                output = tmpfile.read()
-                return status, output.rstrip('\n')
-            finally:
-                tmpfile.close()
-        finally:
-            os.remove(tmppath)
-
-    def _asynclist(self, cmd):
-        args = (self._env.get_variables(), cmd)
-        arg = encodeargs(args)
-        assert len(arg) < 30*1024
-        cmd = ['pysh.bat', '--ast', '-c', arg]
-        p = subprocess.Popen(cmd, cwd=self._env['PWD'])
-        self._children[p.pid] = p
-        self._env['!'] = p.pid
-        return 0
-
-    def wait(self, pids=None):
-        if not pids:
-            pids = self._children.keys()
-
-        status = 127
-        for pid in pids:
-            if pid not in self._children:
-                continue
-            p = self._children.pop(pid)
-            status = p.wait()
-
-        return status
-

+ 0 - 116
bitbake/lib/bb/pysh/lsprof.py

@@ -1,116 +0,0 @@
-#! /usr/bin/env python
-
-import sys
-from _lsprof import Profiler, profiler_entry
-
-__all__ = ['profile', 'Stats']
-
-def profile(f, *args, **kwds):
-    """XXX docstring"""
-    p = Profiler()
-    p.enable(subcalls=True, builtins=True)
-    try:
-        f(*args, **kwds)
-    finally:
-        p.disable()
-    return Stats(p.getstats())
-
-
-class Stats(object):
-    """XXX docstring"""
-
-    def __init__(self, data):
-        self.data = data
-
-    def sort(self, crit="inlinetime"):
-        """XXX docstring"""
-        if crit not in profiler_entry.__dict__:
-            raise ValueError("Can't sort by %s" % crit)
-        self.data.sort(lambda b, a: cmp(getattr(a, crit),
-                                        getattr(b, crit)))
-        for e in self.data:
-            if e.calls:
-                e.calls.sort(lambda b, a: cmp(getattr(a, crit),
-                                              getattr(b, crit)))
-
-    def pprint(self, top=None, file=None, limit=None, climit=None):
-        """XXX docstring"""
-        if file is None:
-            file = sys.stdout
-        d = self.data
-        if top is not None:
-            d = d[:top]
-        cols = "% 12s %12s %11.4f %11.4f   %s\n"
-        hcols = "% 12s %12s %12s %12s %s\n"
-        cols2 = "+%12s %12s %11.4f %11.4f +  %s\n"
-        file.write(hcols % ("CallCount", "Recursive", "Total(ms)",
-                            "Inline(ms)", "module:lineno(function)"))
-        count = 0
-        for e in d:
-            file.write(cols % (e.callcount, e.reccallcount, e.totaltime,
-                               e.inlinetime, label(e.code)))
-            count += 1
-            if limit is not None and count == limit:
-                return
-            ccount = 0
-            if e.calls:
-                for se in e.calls:
-                    file.write(cols % ("+%s" % se.callcount, se.reccallcount,
-                                       se.totaltime, se.inlinetime,
-                                       "+%s" % label(se.code)))
-                    count += 1
-                    ccount += 1
-                    if limit is not None and count == limit:
-                        return
-                    if climit is not None and ccount == climit:
-                        break
-
-    def freeze(self):
-        """Replace all references to code objects with string
-        descriptions; this makes it possible to pickle the instance."""
-
-        # this code is probably rather ickier than it needs to be!
-        for i in range(len(self.data)):
-            e = self.data[i]
-            if not isinstance(e.code, str):
-                self.data[i] = type(e)((label(e.code),) + e[1:])
-            if e.calls:
-                for j in range(len(e.calls)):
-                    se = e.calls[j]
-                    if not isinstance(se.code, str):
-                        e.calls[j] = type(se)((label(se.code),) + se[1:])
-
-_fn2mod = {}
-
-def label(code):
-    if isinstance(code, str):
-        return code
-    try:
-        mname = _fn2mod[code.co_filename]
-    except KeyError:
-        for k, v in sys.modules.items():
-            if v is None:
-                continue
-            if not hasattr(v, '__file__'):
-                continue
-            if not isinstance(v.__file__, str):
-                continue
-            if v.__file__.startswith(code.co_filename):
-                mname = _fn2mod[code.co_filename] = k
-                break
-        else:
-            mname = _fn2mod[code.co_filename] = '<%s>'%code.co_filename
-
-    return '%s:%d(%s)' % (mname, code.co_firstlineno, code.co_name)
-
-
-if __name__ == '__main__':
-    import os
-    sys.argv = sys.argv[1:]
-    if not sys.argv:
-        print >> sys.stderr, "usage: lsprof.py <script> <arguments...>"
-        sys.exit(2)
-    sys.path.insert(0, os.path.abspath(os.path.dirname(sys.argv[0])))
-    stats = profile(execfile, sys.argv[0], globals(), locals())
-    stats.sort()
-    stats.pprint()

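For reference, the removed lsprof wrapper was driven through its profile()/Stats helpers; a minimal sketch of that usage follows (slow_function is a hypothetical callable, and the module itself is Python-2-era code, which is presumably why it is being dropped):

    import sys
    import lsprof

    def slow_function(n):
        return sum(i * i for i in range(n))

    stats = lsprof.profile(slow_function, 100000)    # runs the callable under _lsprof.Profiler
    stats.sort(crit="inlinetime")                    # order entries by inline time
    stats.pprint(top=10, file=sys.stderr, climit=5)  # print the ten hottest entries

The pysh.py --profile path removed further below used the same Profiler/Stats pair directly.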
+ 0 - 167
bitbake/lib/bb/pysh/pysh.py

@@ -1,167 +0,0 @@
-# pysh.py - command processing for pysh.
-#
-# Copyright 2007 Patrick Mezard
-#
-# This software may be used and distributed according to the terms
-# of the GNU General Public License, incorporated herein by reference.
-
-import optparse
-import os
-import sys
-
-import interp
-
-SH_OPT = optparse.OptionParser(prog='pysh', usage="%prog [OPTIONS]", version='0.1')
-SH_OPT.add_option('-c', action='store_true', dest='command_string', default=None, 
-    help='A string that shall be interpreted by the shell as one or more commands')
-SH_OPT.add_option('--redirect-to', dest='redirect_to', default=None, 
-    help='Redirect script commands stdout and stderr to the specified file')
-# See utility_command in builtin.py about the reason for this flag.
-SH_OPT.add_option('--redirected', dest='redirected', action='store_true', default=False, 
-    help='Tell the interpreter that stdout and stderr are actually the same objects, which is really stdout')
-SH_OPT.add_option('--debug-parsing', action='store_true', dest='debug_parsing', default=False, 
-    help='Trace PLY execution')
-SH_OPT.add_option('--debug-tree', action='store_true', dest='debug_tree', default=False, 
-    help='Display the generated syntax tree.')
-SH_OPT.add_option('--debug-cmd', action='store_true', dest='debug_cmd', default=False, 
-    help='Trace command execution before parameters expansion and exit status.')
-SH_OPT.add_option('--debug-utility', action='store_true', dest='debug_utility', default=False, 
-    help='Trace utility calls, after parameters expansions')
-SH_OPT.add_option('--ast', action='store_true', dest='ast', default=False,
-    help='Encoded commands to execute in a subprocess')
-SH_OPT.add_option('--profile', action='store_true', default=False,
-    help='Profile pysh run')
-    
-    
-def split_args(args):
-    # Separate shell arguments from command ones
-    # Just stop at the first argument not starting with a dash. I know, this is completely broken,
-    # it ignores files starting with a dash or may take option values for command file. This is not
-    # supposed to happen for now
-    command_index = len(args)
-    for i,arg in enumerate(args):
-        if not arg.startswith('-'):
-            command_index = i
-            break
-            
-    return args[:command_index], args[command_index:]
-
-
-def fixenv(env):
-    path = env.get('PATH')
-    if path is not None:
-        parts = path.split(os.pathsep)
-        # Remove Windows utilities from PATH, they are useless at best and
-        # some of them (find) may be confused with other utilities.
-        parts = [p for p in parts if 'system32' not in p.lower()]
-        env['PATH'] = os.pathsep.join(parts)
-    if env.get('HOME') is None:
-        # Several utilities, including cvsps, cannot work without
-        # a defined HOME directory.
-        env['HOME'] = os.path.expanduser('~')
-    return env
-
-def _sh(cwd, shargs, cmdargs, options, debugflags=None, env=None):
-    if os.environ.get('PYSH_TEXT') != '1':
-        import msvcrt
-        for fp in (sys.stdin, sys.stdout, sys.stderr):
-            msvcrt.setmode(fp.fileno(), os.O_BINARY)
-
-    hgbin = os.environ.get('PYSH_HGTEXT') != '1'
-    
-    if debugflags is None:
-        debugflags = []
-        if options.debug_parsing:    debugflags.append('debug-parsing')
-        if options.debug_utility:    debugflags.append('debug-utility')
-        if options.debug_cmd:        debugflags.append('debug-cmd')
-        if options.debug_tree:       debugflags.append('debug-tree')
-    
-    if env is None:
-        env = fixenv(dict(os.environ))
-    if cwd is None:
-        cwd = os.getcwd()
-
-    if not cmdargs:
-        # Nothing to do
-        return 0
-
-    ast = None
-    command_file = None
-    if options.command_string:
-        input = cmdargs[0]
-        if not options.ast:
-            input += '\n'
-        else:
-            args, input = interp.decodeargs(input), None
-            env, ast = args
-            cwd = env.get('PWD', cwd)
-    else:
-        command_file = cmdargs[0]
-        arguments = cmdargs[1:]
-
-        prefix = interp.resolve_shebang(command_file, ignoreshell=True)
-        if prefix:
-            input = ' '.join(prefix + [command_file] + arguments)
-        else:
-            # Read commands from file
-            f = file(command_file)
-            try:
-                # Trailing newline to help the parser
-                input = f.read() + '\n'
-            finally:
-                f.close()
-    
-    redirect = None
-    try:
-        if options.redirected:
-            stdout = sys.stdout
-            stderr = stdout
-        elif options.redirect_to:
-            redirect = open(options.redirect_to, 'wb')
-            stdout = redirect
-            stderr = redirect
-        else:
-            stdout = sys.stdout
-            stderr = sys.stderr
-            
-        # TODO: set arguments to environment variables
-        opts = interp.Options()
-        opts.hgbinary = hgbin
-        ip = interp.Interpreter(cwd, debugflags, stdout=stdout, stderr=stderr,
-                                opts=opts)
-        try:
-            # Export given environment in shell object
-            for k,v in env.iteritems():
-                ip.get_env().export(k,v)
-            return ip.execute_script(input, ast, scriptpath=command_file)
-        finally:
-            ip.close()
-    finally:
-        if redirect is not None:
-            redirect.close()
-
-def sh(cwd=None, args=None, debugflags=None, env=None):
-    if args is None:
-        args = sys.argv[1:]
-    shargs, cmdargs = split_args(args)
-    options, shargs = SH_OPT.parse_args(shargs)
-
-    if options.profile:
-        import lsprof
-        p = lsprof.Profiler()
-        p.enable(subcalls=True)
-        try:
-            return _sh(cwd, shargs, cmdargs, options, debugflags, env)
-        finally:
-            p.disable()
-            stats = lsprof.Stats(p.getstats())
-            stats.sort()
-            stats.pprint(top=10, file=sys.stderr, climit=5)
-    else:
-        return _sh(cwd, shargs, cmdargs, options, debugflags, env)
-            
-def main():
-    sys.exit(sh())
-
-if __name__=='__main__':
-    main()

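The removed front-end split its own options from the command by stopping at the first argument that does not start with a dash (a deliberately naive rule, as its own comment admits). A quick illustration of split_args() under that rule:

    shargs, cmdargs = split_args(['--debug-cmd', '-c', 'echo hello'])
    # shargs  == ['--debug-cmd', '-c']   -> parsed by SH_OPT
    # cmdargs == ['echo hello']          -> handed to the interpreter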
+ 0 - 5
bitbake/lib/bb/pysh/pyshlex.py

@@ -13,11 +13,6 @@
 # PLY in pull mode. It was designed to work incrementally and it would not be
 # that hard to enable pull mode.
 import re
-try:
-    s = set()
-    del s
-except NameError:
-    from Set import Set as set
 
 from ply import lex
 from bb.pysh.sherrors import *

+ 10 - 7
bitbake/lib/bb/pysh/pyshyacc.py

@@ -636,13 +636,16 @@ def p_empty(p):
 def p_error(p):
     msg = []
     w = msg.append
-    w('%r\n' % p)
-    w('followed by:\n')
-    for i in range(5):
-        n = yacc.token()
-        if not n:
-            break
-        w('  %r\n' % n)
+    if p:
+        w('%r\n' % p)
+        w('followed by:\n')
+        for i in range(5):
+            n = yacc.token()
+            if not n:
+                break
+            w('  %r\n' % n)
+    else:
+        w('Unexpected EOF')
     raise sherrors.ShellSyntaxError(''.join(msg))
 
 # Build the parser

+ 0 - 26
bitbake/lib/bb/pysh/sherrors.py

@@ -13,29 +13,3 @@ class ShellError(Exception):
 
 class ShellSyntaxError(ShellError):
     pass
-    
-class UtilityError(ShellError):
-    """Raised upon utility syntax error (option or operand error)."""
-    pass
-   
-class ExpansionError(ShellError):
-    pass
-     
-class CommandNotFound(ShellError):
-    """Specified command was not found."""
-    pass
-    
-class RedirectionError(ShellError):
-    pass
-    
-class VarAssignmentError(ShellError):
-    """Variable assignment error."""
-    pass
-    
-class ExitSignal(ShellError):
-    """Exit signal."""
-    pass
-    
-class ReturnSignal(ShellError):
-    """Exit signal."""
-    pass

+ 0 - 77
bitbake/lib/bb/pysh/subprocess_fix.py

@@ -1,77 +0,0 @@
-# subprocess - Subprocesses with accessible I/O streams
-#
-# For more information about this module, see PEP 324.
-#
-# This module should remain compatible with Python 2.2, see PEP 291.
-#
-# Copyright (c) 2003-2005 by Peter Astrand <astrand@lysator.liu.se>
-#
-# Licensed to PSF under a Contributor Agreement.
-# See http://www.python.org/2.4/license for licensing details.
-
-def list2cmdline(seq):
-    """
-    Translate a sequence of arguments into a command line
-    string, using the same rules as the MS C runtime:
-
-    1) Arguments are delimited by white space, which is either a
-       space or a tab.
-
-    2) A string surrounded by double quotation marks is
-       interpreted as a single argument, regardless of white space
-       contained within.  A quoted string can be embedded in an
-       argument.
-
-    3) A double quotation mark preceded by a backslash is
-       interpreted as a literal double quotation mark.
-
-    4) Backslashes are interpreted literally, unless they
-       immediately precede a double quotation mark.
-
-    5) If backslashes immediately precede a double quotation mark,
-       every pair of backslashes is interpreted as a literal
-       backslash.  If the number of backslashes is odd, the last
-       backslash escapes the next double quotation mark as
-       described in rule 3.
-    """
-
-    # See
-    # http://msdn.microsoft.com/library/en-us/vccelng/htm/progs_12.asp
-    result = []
-    needquote = False
-    for arg in seq:
-        bs_buf = []
-
-        # Add a space to separate this argument from the others
-        if result:
-            result.append(' ')
-
-        needquote = (" " in arg) or ("\t" in arg) or ("|" in arg) or arg == ""
-        if needquote:
-            result.append('"')
-
-        for c in arg:
-            if c == '\\':
-                # Don't know if we need to double yet.
-                bs_buf.append(c)
-            elif c == '"':
-                # Double backspaces.
-                result.append('\\' * len(bs_buf)*2)
-                bs_buf = []
-                result.append('\\"')
-            else:
-                # Normal char
-                if bs_buf:
-                    result.extend(bs_buf)
-                    bs_buf = []
-                result.append(c)
-
-        # Add remaining backspaces, if any.
-        if bs_buf:
-            result.extend(bs_buf)
-
-        if needquote:
-            result.extend(bs_buf)
-            result.append('"')
-            
-    return ''.join(result)

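The removed list2cmdline() copy implemented the MS C runtime quoting rules listed in its docstring. A few illustrative inputs and the command lines they would produce (the examples are mine, not from the source):

    #   ['echo', 'hello']        ->  echo hello
    #   ['echo', 'hello world']  ->  echo "hello world"    (whitespace forces quoting)
    #   ['echo', 'say "hi"']     ->  echo "say \"hi\""     (quotes escaped with a backslash)
    #   ['echo', 'C:\\dir\\']    ->  echo C:\dir\          (lone backslashes stay literal)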
+ 1 - 11
bitbake/lib/bb/remotedata.py

@@ -6,18 +6,8 @@ Provides support for using a datastore from the bitbake client
 
 # Copyright (C) 2016  Intel Corporation
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 import bb.data
 

File diff suppressed because it is too large
+ 364 - 284
bitbake/lib/bb/runqueue.py


+ 1 - 13
bitbake/lib/bb/server/__init__.py

@@ -5,17 +5,5 @@
 # Copyright (C) 2006 - 2008  Richard Purdie
 # Copyright (C) 2013         Alexandru Damian
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-

+ 77 - 46
bitbake/lib/bb/server/process.py

@@ -3,18 +3,8 @@
 #
 # Copyright (C) 2010 Bob Foerster <robert@erafx.com>
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 """
     This module implements a multiprocessing.Process based server for bitbake.
@@ -130,6 +120,7 @@ class ProcessServer(multiprocessing.Process):
         bb.utils.set_process_name("Cooker")
 
         ready = []
+        newconnections = []
 
         self.controllersock = False
         fds = [self.sock]
@@ -138,37 +129,48 @@ class ProcessServer(multiprocessing.Process):
         print("Entering server connection loop")
 
         def disconnect_client(self, fds):
-            if not self.haveui:
-                return
             print("Disconnecting Client")
-            fds.remove(self.controllersock)
-            fds.remove(self.command_channel)
-            bb.event.unregister_UIHhandler(self.event_handle, True)
-            self.command_channel_reply.writer.close()
-            self.event_writer.writer.close()
-            del self.event_writer
-            self.controllersock.close()
-            self.controllersock = False
-            self.haveui = False
-            self.lastui = time.time()
-            self.cooker.clientComplete()
-            if self.timeout is None:
+            if self.controllersock:
+                fds.remove(self.controllersock)
+                self.controllersock.close()
+                self.controllersock = False
+            if self.haveui:
+                fds.remove(self.command_channel)
+                bb.event.unregister_UIHhandler(self.event_handle, True)
+                self.command_channel_reply.writer.close()
+                self.event_writer.writer.close()
+                self.command_channel.close()
+                self.command_channel = False
+                del self.event_writer
+                self.lastui = time.time()
+                self.cooker.clientComplete()
+                self.haveui = False
+            ready = select.select(fds,[],[],0)[0]
+            if newconnections:
+                print("Starting new client")
+                conn = newconnections.pop(-1)
+                fds.append(conn)
+                self.controllersock = conn
+            elif self.timeout is None and not ready:
                 print("No timeout, exiting.")
                 self.quit = True
 
         while not self.quit:
             if self.sock in ready:
-                self.controllersock, address = self.sock.accept()
-                if self.haveui:
-                    print("Dropping connection attempt as we have a UI %s" % (str(ready)))
-                    self.controllersock.close()
-                else:
-                    print("Accepting %s" % (str(ready)))
-                    fds.append(self.controllersock)
+                while select.select([self.sock],[],[],0)[0]:
+                    controllersock, address = self.sock.accept()
+                    if self.controllersock:
+                        print("Queuing %s (%s)" % (str(ready), str(newconnections)))
+                        newconnections.append(controllersock)
+                    else:
+                        print("Accepting %s (%s)" % (str(ready), str(newconnections)))
+                        self.controllersock = controllersock
+                        fds.append(controllersock)
             if self.controllersock in ready:
                 try:
-                    print("Connecting Client")
+                    print("Processing Client")
                     ui_fds = recvfds(self.controllersock, 3)
+                    print("Connecting Client")
 
                     # Where to write events to
                     writer = ConnectionWriter(ui_fds[0])
@@ -239,6 +241,12 @@ class ProcessServer(multiprocessing.Process):
         while not lock:
             with bb.utils.timeout(3):
                 lock = bb.utils.lockfile(lockfile, shared=False, retry=False, block=True)
+                if lock:
+                    # We hold the lock so we can remove the file (hide stale pid data)
+                    bb.utils.remove(lockfile)
+                    bb.utils.unlockfile(lock)
+                    return
+
                 if not lock:
                     # Some systems may not have lsof available
                     procs = None
@@ -259,10 +267,6 @@ class ProcessServer(multiprocessing.Process):
                     if procs:
                         msg += ":\n%s" % str(procs)
                     print(msg)
-                    return
-        # We hold the lock so we can remove the file (hide stale pid data)
-        bb.utils.remove(lockfile)
-        bb.utils.unlockfile(lock)
 
     def idle_commands(self, delay, fds=None):
         nextsleep = delay
@@ -398,36 +402,45 @@ class BitBakeServer(object):
         os.close(self.readypipein)
 
         ready = ConnectionReader(self.readypipe)
-        r = ready.poll(30)
+        r = ready.poll(5)
+        if not r:
+            bb.note("Bitbake server didn't start within 5 seconds, waiting for 90")
+            r = ready.poll(90)
         if r:
             try:
                 r = ready.get()
             except EOFError:
                 # Trap the child exitting/closing the pipe and error out
                 r = None
-        if not r or r != "ready":
+        if not r or r[0] != "r":
             ready.close()
-            bb.error("Unable to start bitbake server")
+            bb.error("Unable to start bitbake server (%s)" % str(r))
             if os.path.exists(logfile):
                 logstart_re = re.compile(self.start_log_format % ('([0-9]+)', '([0-9-]+ [0-9:.]+)'))
                 started = False
                 lines = []
+                lastlines = []
                 with open(logfile, "r") as f:
                     for line in f:
                         if started:
                             lines.append(line)
                         else:
+                            lastlines.append(line)
                             res = logstart_re.match(line.rstrip())
                             if res:
                                 ldatetime = datetime.datetime.strptime(res.group(2), self.start_log_datetime_format)
                                 if ldatetime >= startdatetime:
                                     started = True
                                     lines.append(line)
+                        if len(lastlines) > 60:
+                            lastlines = lastlines[-60:]
                 if lines:
-                    if len(lines) > 10:
-                        bb.error("Last 10 lines of server log for this session (%s):\n%s" % (logfile, "".join(lines[-10:])))
+                    if len(lines) > 60:
+                        bb.error("Last 60 lines of server log for this session (%s):\n%s" % (logfile, "".join(lines[-60:])))
                     else:
                         bb.error("Server log for this session (%s):\n%s" % (logfile, "".join(lines)))
+                elif lastlines:
+                        bb.error("Server didn't start, last 60 loglines (%s):\n%s" % (logfile, "".join(lastlines)))
             else:
                 bb.error("%s doesn't exist" % logfile)
 
@@ -437,17 +450,24 @@ class BitBakeServer(object):
 
     def _startServer(self):
         print(self.start_log_format % (os.getpid(), datetime.datetime.now().strftime(self.start_log_datetime_format)))
+        sys.stdout.flush()
+
         server = ProcessServer(self.bitbake_lock, self.sock, self.sockname)
         self.configuration.setServerRegIdleCallback(server.register_idle_function)
         os.close(self.readypipe)
         writer = ConnectionWriter(self.readypipein)
-        self.cooker = bb.cooker.BBCooker(self.configuration, self.featureset)
-        writer.send("ready")
+        try:
+            self.cooker = bb.cooker.BBCooker(self.configuration, self.featureset)
+        except bb.BBHandledException:
+            return None
+        writer.send("r")
         writer.close()
         server.cooker = self.cooker
         server.server_timeout = self.configuration.server_timeout
         server.xmlrpcinterface = self.configuration.xmlrpcinterface
         print("Started bitbake server pid %d" % os.getpid())
+        sys.stdout.flush()
+
         server.start()
 
 def connectProcessServer(sockname, featureset):
@@ -459,10 +479,20 @@ def connectProcessServer(sockname, featureset):
     readfd = writefd = readfd1 = writefd1 = readfd2 = writefd2 = None
     eq = command_chan_recv = command_chan = None
 
+    sock.settimeout(10)
+
     try:
         try:
             os.chdir(os.path.dirname(sockname))
-            sock.connect(os.path.basename(sockname))
+            finished = False
+            while not finished:
+                try:
+                    sock.connect(os.path.basename(sockname))
+                    finished = True
+                except IOError as e:
+                    if e.errno == errno.EWOULDBLOCK:
+                        pass
+                    raise
         finally:
             os.chdir(cwd)
 
@@ -493,7 +523,8 @@ def connectProcessServer(sockname, featureset):
             command_chan.close()
         for i in [writefd, readfd1, writefd2]:
             try:
-                os.close(i)
+                if i:
+                    os.close(i)
             except OSError:
                 pass
         sock.close()

+ 1 - 11
bitbake/lib/bb/server/xmlrpcclient.py

@@ -4,18 +4,8 @@
 # Copyright (C) 2006 - 2007  Michael 'Mickey' Lauer
 # Copyright (C) 2006 - 2008  Richard Purdie
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 import os
 import sys

+ 1 - 11
bitbake/lib/bb/server/xmlrpcserver.py

@@ -4,18 +4,8 @@
 # Copyright (C) 2006 - 2007  Michael 'Mickey' Lauer
 # Copyright (C) 2006 - 2008  Richard Purdie
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 import os
 import sys

+ 281 - 69
bitbake/lib/bb/siggen.py

@@ -1,3 +1,7 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
 import hashlib
 import logging
 import os
@@ -8,6 +12,8 @@ import bb.data
 import difflib
 import simplediff
 from bb.checksum import FileChecksumCache
+from bb import runqueue
+import hashserv
 
 logger = logging.getLogger('BitBake.SigGen')
 
@@ -37,12 +43,18 @@ class SignatureGenerator(object):
         self.runtaskdeps = {}
         self.file_checksum_values = {}
         self.taints = {}
+        self.unitaskhashes = {}
+        self.setscenetasks = {}
 
     def finalise(self, fn, d, varient):
         return
 
-    def get_taskhash(self, fn, task, deps, dataCache):
-        return "0"
+    def get_unihash(self, tid):
+        return self.taskhash[tid]
+
+    def get_taskhash(self, tid, deps, dataCache):
+        self.taskhash[tid] = hashlib.sha256(tid.encode("utf-8")).hexdigest()
+        return self.taskhash[tid]
 
     def writeout_file_checksum_cache(self):
         """Write/update the file checksum cache onto disk"""
@@ -64,14 +76,25 @@ class SignatureGenerator(object):
         return
 
     def get_taskdata(self):
-        return (self.runtaskdeps, self.taskhash, self.file_checksum_values, self.taints, self.basehash)
+        return (self.runtaskdeps, self.taskhash, self.file_checksum_values, self.taints, self.basehash, self.unitaskhashes, self.setscenetasks)
 
     def set_taskdata(self, data):
-        self.runtaskdeps, self.taskhash, self.file_checksum_values, self.taints, self.basehash = data
+        self.runtaskdeps, self.taskhash, self.file_checksum_values, self.taints, self.basehash, self.unitaskhashes, self.setscenetasks = data
 
     def reset(self, data):
         self.__init__(data)
 
+    def get_taskhashes(self):
+        return self.taskhash, self.unitaskhashes
+
+    def set_taskhashes(self, hashes):
+        self.taskhash, self.unitaskhashes = hashes
+
+    def save_unitaskhashes(self):
+        return
+
+    def set_setscene_tasks(self, setscene_tasks):
+        return
 
 class SignatureGeneratorBasic(SignatureGenerator):
     """
@@ -87,7 +110,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
         self.taints = {}
         self.gendeps = {}
         self.lookupcache = {}
-        self.pkgnameextract = re.compile("(?P<fn>.*)\..*")
+        self.setscenetasks = {}
         self.basewhitelist = set((data.getVar("BB_HASHBASE_WHITELIST") or "").split())
         self.taskwhitelist = None
         self.init_rundepcheck(data)
@@ -98,6 +121,9 @@ class SignatureGeneratorBasic(SignatureGenerator):
         else:
             self.checksum_cache = None
 
+        self.unihash_cache = bb.cache.SimpleCache("1")
+        self.unitaskhashes = self.unihash_cache.init_cache(data, "bb_unihashes.dat", {})
+
     def init_rundepcheck(self, data):
         self.taskwhitelist = data.getVar("BB_HASHTASK_WHITELIST") or None
         if self.taskwhitelist:
@@ -113,10 +139,16 @@ class SignatureGeneratorBasic(SignatureGenerator):
         taskdeps, basehash = bb.data.generate_dependency_hash(tasklist, gendeps, lookupcache, self.basewhitelist, fn)
 
         for task in tasklist:
-            k = fn + "." + task
-            if not ignore_mismatch and k in self.basehash and self.basehash[k] != basehash[k]:
-                bb.error("When reparsing %s, the basehash value changed from %s to %s. The metadata is not deterministic and this needs to be fixed." % (k, self.basehash[k], basehash[k]))
-            self.basehash[k] = basehash[k]
+            tid = fn + ":" + task
+            if not ignore_mismatch and tid in self.basehash and self.basehash[tid] != basehash[tid]:
+                bb.error("When reparsing %s, the basehash value changed from %s to %s. The metadata is not deterministic and this needs to be fixed." % (tid, self.basehash[tid], basehash[tid]))
+                bb.error("The following commands may help:")
+                cmd = "$ bitbake %s -c%s" % (d.getVar('PN'), task)
+                # Make sure sigdata is dumped before run printdiff
+                bb.error("%s -Snone" % cmd)
+                bb.error("Then:")
+                bb.error("%s -Sprintdiff\n" % cmd)
+            self.basehash[tid] = basehash[tid]
 
         self.taskdeps[fn] = taskdeps
         self.gendeps[fn] = gendeps
@@ -124,6 +156,9 @@ class SignatureGeneratorBasic(SignatureGenerator):
 
         return taskdeps
 
+    def set_setscene_tasks(self, setscene_tasks):
+        self.setscenetasks = setscene_tasks
+
     def finalise(self, fn, d, variant):
 
         mc = d.getVar("__BBMULTICONFIG", False) or ""
@@ -143,7 +178,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
         #    self.dump_sigtask(fn, task, d.getVar("STAMP"), False)
 
         for task in taskdeps:
-            d.setVar("BB_BASEHASH_task-%s" % task, self.basehash[fn + "." + task])
+            d.setVar("BB_BASEHASH_task-%s" % task, self.basehash[fn + ":" + task])
 
     def rundep_check(self, fn, recipename, task, dep, depname, dataCache):
         # Return True if we should keep the dependency, False to drop it
@@ -163,31 +198,26 @@ class SignatureGeneratorBasic(SignatureGenerator):
             pass
         return taint
 
-    def get_taskhash(self, fn, task, deps, dataCache):
+    def get_taskhash(self, tid, deps, dataCache):
 
-        mc = ''
-        if fn.startswith('multiconfig:'):
-            mc = fn.split(':')[1]
-        k = fn + "." + task
+        (mc, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
 
-        data = dataCache.basetaskhash[k]
-        self.basehash[k] = data
-        self.runtaskdeps[k] = []
-        self.file_checksum_values[k] = []
+        data = dataCache.basetaskhash[tid]
+        self.basehash[tid] = data
+        self.runtaskdeps[tid] = []
+        self.file_checksum_values[tid] = []
         recipename = dataCache.pkg_fn[fn]
         for dep in sorted(deps, key=clean_basepath):
-            pkgname = self.pkgnameextract.search(dep).group('fn')
-            if mc:
-                depmc = pkgname.split(':')[1]
-                if mc != depmc:
-                    continue
-            depname = dataCache.pkg_fn[pkgname]
+            (depmc, _, deptaskname, depfn) = bb.runqueue.split_tid_mcfn(dep)
+            if mc != depmc:
+                continue
+            depname = dataCache.pkg_fn[depfn]
             if not self.rundep_check(fn, recipename, task, dep, depname, dataCache):
                 continue
             if dep not in self.taskhash:
                 bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?" % dep)
-            data = data + self.taskhash[dep]
-            self.runtaskdeps[k].append(dep)
+            data = data + self.get_unihash(dep)
+            self.runtaskdeps[tid].append(dep)
 
         if task in dataCache.file_checksums[fn]:
             if self.checksum_cache:
@@ -195,7 +225,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
             else:
                 checksums = bb.fetch2.get_file_checksums(dataCache.file_checksums[fn][task], recipename)
             for (f,cs) in checksums:
-                self.file_checksum_values[k].append((f,cs))
+                self.file_checksum_values[tid].append((f,cs))
                 if cs:
                     data = data + cs
 
@@ -205,16 +235,16 @@ class SignatureGeneratorBasic(SignatureGenerator):
             import uuid
             taint = str(uuid.uuid4())
             data = data + taint
-            self.taints[k] = "nostamp:" + taint
+            self.taints[tid] = "nostamp:" + taint
 
         taint = self.read_taint(fn, task, dataCache.stamp[fn])
         if taint:
             data = data + taint
-            self.taints[k] = taint
-            logger.warning("%s is tainted from a forced run" % k)
+            self.taints[tid] = taint
+            logger.warning("%s is tainted from a forced run" % tid)
 
-        h = hashlib.md5(data.encode("utf-8")).hexdigest()
-        self.taskhash[k] = h
+        h = hashlib.sha256(data.encode("utf-8")).hexdigest()
+        self.taskhash[tid] = h
         #d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task])
         return h
 
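In outline, the reworked get_taskhash() above folds the base hash, the unihash of every qualifying runtime dependency, any file checksums and any taint into a single SHA-256 digest (previously MD5). A simplified schematic, leaving out the multiconfig and rundep_check filtering that the real code performs:

    import hashlib

    def taskhash(basehash, dep_unihashes, file_checksums, taint=None):
        data = basehash
        for dep in dep_unihashes:      # unihashes of runtime deps, in sorted order
            data += dep
        for cs in file_checksums:      # per-file checksums, when the task has any
            data += cs
        if taint:                      # forced/nostamp taint invalidates the hash
            data += taint
        return hashlib.sha256(data.encode("utf-8")).hexdigest()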
@@ -227,17 +257,20 @@ class SignatureGeneratorBasic(SignatureGenerator):
             bb.fetch2.fetcher_parse_save()
             bb.fetch2.fetcher_parse_done()
 
+    def save_unitaskhashes(self):
+        self.unihash_cache.save(self.unitaskhashes)
+
     def dump_sigtask(self, fn, task, stampbase, runtime):
 
-        k = fn + "." + task
+        tid = fn + ":" + task
         referencestamp = stampbase
         if isinstance(runtime, str) and runtime.startswith("customfile"):
             sigfile = stampbase
             referencestamp = runtime[11:]
-        elif runtime and k in self.taskhash:
-            sigfile = stampbase + "." + task + ".sigdata" + "." + self.taskhash[k]
+        elif runtime and tid in self.taskhash:
+            sigfile = stampbase + "." + task + ".sigdata" + "." + self.get_unihash(tid)
         else:
-            sigfile = stampbase + "." + task + ".sigbasedata" + "." + self.basehash[k]
+            sigfile = stampbase + "." + task + ".sigbasedata" + "." + self.basehash[tid]
 
         bb.utils.mkdirhier(os.path.dirname(sigfile))
 
@@ -246,7 +279,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
         data['basewhitelist'] = self.basewhitelist
         data['taskwhitelist'] = self.taskwhitelist
         data['taskdeps'] = self.taskdeps[fn][task]
-        data['basehash'] = self.basehash[k]
+        data['basehash'] = self.basehash[tid]
         data['gendeps'] = {}
         data['varvals'] = {}
         data['varvals'][task] = self.lookupcache[fn][task]
@@ -256,30 +289,31 @@ class SignatureGeneratorBasic(SignatureGenerator):
             data['gendeps'][dep] = self.gendeps[fn][dep]
             data['varvals'][dep] = self.lookupcache[fn][dep]
 
-        if runtime and k in self.taskhash:
-            data['runtaskdeps'] = self.runtaskdeps[k]
-            data['file_checksum_values'] = [(os.path.basename(f), cs) for f,cs in self.file_checksum_values[k]]
+        if runtime and tid in self.taskhash:
+            data['runtaskdeps'] = self.runtaskdeps[tid]
+            data['file_checksum_values'] = [(os.path.basename(f), cs) for f,cs in self.file_checksum_values[tid]]
             data['runtaskhashes'] = {}
             for dep in data['runtaskdeps']:
-                data['runtaskhashes'][dep] = self.taskhash[dep]
-            data['taskhash'] = self.taskhash[k]
+                data['runtaskhashes'][dep] = self.get_unihash(dep)
+            data['taskhash'] = self.taskhash[tid]
+            data['unihash'] = self.get_unihash(tid)
 
         taint = self.read_taint(fn, task, referencestamp)
         if taint:
             data['taint'] = taint
 
-        if runtime and k in self.taints:
-            if 'nostamp:' in self.taints[k]:
-                data['taint'] = self.taints[k]
+        if runtime and tid in self.taints:
+            if 'nostamp:' in self.taints[tid]:
+                data['taint'] = self.taints[tid]
 
         computed_basehash = calc_basehash(data)
-        if computed_basehash != self.basehash[k]:
-            bb.error("Basehash mismatch %s versus %s for %s" % (computed_basehash, self.basehash[k], k))
-        if runtime and k in self.taskhash:
+        if computed_basehash != self.basehash[tid]:
+            bb.error("Basehash mismatch %s versus %s for %s" % (computed_basehash, self.basehash[tid], tid))
+        if runtime and tid in self.taskhash:
             computed_taskhash = calc_taskhash(data)
-            if computed_taskhash != self.taskhash[k]:
-                bb.error("Taskhash mismatch %s versus %s for %s" % (computed_taskhash, self.taskhash[k], k))
-                sigfile = sigfile.replace(self.taskhash[k], computed_taskhash)
+            if computed_taskhash != self.taskhash[tid]:
+                bb.error("Taskhash mismatch %s versus %s for %s" % (computed_taskhash, self.taskhash[tid], tid))
+                sigfile = sigfile.replace(self.taskhash[tid], computed_taskhash)
 
         fd, tmpfile = tempfile.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.")
         try:
@@ -299,30 +333,34 @@ class SignatureGeneratorBasic(SignatureGenerator):
         if fn in self.taskdeps:
             for task in self.taskdeps[fn]:
                 tid = fn + ":" + task
-                (mc, _, _) = bb.runqueue.split_tid(tid)
-                k = fn + "." + task
-                if k not in self.taskhash:
+                mc = bb.runqueue.mc_from_tid(tid)
+                if tid not in self.taskhash:
                     continue
-                if dataCaches[mc].basetaskhash[k] != self.basehash[k]:
-                    bb.error("Bitbake's cached basehash does not match the one we just generated (%s)!" % k)
-                    bb.error("The mismatched hashes were %s and %s" % (dataCaches[mc].basetaskhash[k], self.basehash[k]))
+                if dataCaches[mc].basetaskhash[tid] != self.basehash[tid]:
+                    bb.error("Bitbake's cached basehash does not match the one we just generated (%s)!" % tid)
+                    bb.error("The mismatched hashes were %s and %s" % (dataCaches[mc].basetaskhash[tid], self.basehash[tid]))
                 self.dump_sigtask(fn, task, dataCaches[mc].stamp[fn], True)
 
 class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
     name = "basichash"
 
+    def get_stampfile_hash(self, tid):
+        if tid in self.taskhash:
+            return self.taskhash[tid]
+
+        # If task is not in basehash, then error
+        return self.basehash[tid]
+
     def stampfile(self, stampbase, fn, taskname, extrainfo, clean=False):
         if taskname != "do_setscene" and taskname.endswith("_setscene"):
-            k = fn + "." + taskname[:-9]
+            tid = fn + ":" + taskname[:-9]
         else:
-            k = fn + "." + taskname
+            tid = fn + ":" + taskname
         if clean:
             h = "*"
-        elif k in self.taskhash:
-            h = self.taskhash[k]
         else:
-            # If k is not in basehash, then error
-            h = self.basehash[k]
+            h = self.get_stampfile_hash(tid)
+
         return ("%s.%s.%s.%s" % (stampbase, taskname, h, extrainfo)).rstrip('.')
 
     def stampcleanmask(self, stampbase, fn, taskname, extrainfo):
@@ -332,6 +370,172 @@ class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
         bb.note("Tainting hash to force rebuild of task %s, %s" % (fn, task))
         bb.build.write_taint(task, d, fn)
 
+class SignatureGeneratorUniHashMixIn(object):
+    def get_taskdata(self):
+        return (self.server, self.method) + super().get_taskdata()
+
+    def set_taskdata(self, data):
+        self.server, self.method = data[:2]
+        super().set_taskdata(data[2:])
+
+    def client(self):
+        if getattr(self, '_client', None) is None:
+            self._client = hashserv.create_client(self.server)
+        return self._client
+
+    def __get_task_unihash_key(self, tid):
+        # TODO: The key only *needs* to be the taskhash, the tid is just
+        # convenient
+        return '%s:%s' % (tid.rsplit("/", 1)[1], self.taskhash[tid])
+
+    def get_stampfile_hash(self, tid):
+        if tid in self.taskhash:
+            # If a unique hash is reported, use it as the stampfile hash. This
+            # ensures that a task won't be re-run when the taskhash changes but
+            # would still result in the same output hash
+            unihash = self.unitaskhashes.get(self.__get_task_unihash_key(tid), None)
+            if unihash is not None:
+                return unihash
+
+        return super().get_stampfile_hash(tid)
+
+    def set_unihash(self, tid, unihash):
+        self.unitaskhashes[self.__get_task_unihash_key(tid)] = unihash
+
+    def get_unihash(self, tid):
+        taskhash = self.taskhash[tid]
+
+        # If it's not a setscene task we can return
+        if self.setscenetasks and tid not in self.setscenetasks:
+            return taskhash
+
+        key = self.__get_task_unihash_key(tid)
+
+        # TODO: This cache can grow unbounded. It probably only needs to keep
+        # for each task
+        unihash = self.unitaskhashes.get(key, None)
+        if unihash is not None:
+            return unihash
+
+        # In the absence of being able to discover a unique hash from the
+        # server, make it be equivalent to the taskhash. The unique "hash" only
+        # really needs to be a unique string (not even necessarily a hash), but
+        # making it match the taskhash has a few advantages:
+        #
+        # 1) All of the sstate code that assumes hashes can be the same
+        # 2) It provides maximal compatibility with builders that don't use
+        #    an equivalency server
+        # 3) The value is easy for multiple independent builders to derive the
+        #    same unique hash from the same input. This means that if the
+        #    independent builders find the same taskhash, but it isn't reported
+        #    to the server, there is a better chance that they will agree on
+        #    the unique hash.
+        unihash = taskhash
+
+        try:
+            data = self.client().get_unihash(self.method, self.taskhash[tid])
+            if data:
+                unihash = data
+                # A unique hash equal to the taskhash is not very interesting,
+                # so it is reported at debug level 2. If they differ, that
+                # is much more interesting, so it is reported at debug level 1
+                bb.debug((1, 2)[unihash == taskhash], 'Found unihash %s in place of %s for %s from %s' % (unihash, taskhash, tid, self.server))
+            else:
+                bb.debug(2, 'No reported unihash for %s:%s from %s' % (tid, taskhash, self.server))
+        except hashserv.client.HashConnectionError as e:
+            bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
+
+        self.unitaskhashes[key] = unihash
+        return unihash
+
+    def report_unihash(self, path, task, d):
+        import importlib
+
+        taskhash = d.getVar('BB_TASKHASH')
+        unihash = d.getVar('BB_UNIHASH')
+        report_taskdata = d.getVar('SSTATE_HASHEQUIV_REPORT_TASKDATA') == '1'
+        tempdir = d.getVar('T')
+        fn = d.getVar('BB_FILENAME')
+        tid = fn + ':do_' + task
+        key = tid.rsplit("/", 1)[1] + ':' + taskhash
+
+        if self.setscenetasks and tid not in self.setscenetasks:
+            return
+
+        # Sanity checks
+        cache_unihash = self.unitaskhashes.get(key, None)
+        if cache_unihash is None:
+            bb.fatal('%s not in unihash cache. Please report this error' % key)
+
+        if cache_unihash != unihash:
+            bb.fatal("Cache unihash %s doesn't match BB_UNIHASH %s" % (cache_unihash, unihash))
+
+        sigfile = None
+        sigfile_name = "depsig.do_%s.%d" % (task, os.getpid())
+        sigfile_link = "depsig.do_%s" % task
+
+        try:
+            sigfile = open(os.path.join(tempdir, sigfile_name), 'w+b')
+
+            locs = {'path': path, 'sigfile': sigfile, 'task': task, 'd': d}
+
+            if "." in self.method:
+                (module, method) = self.method.rsplit('.', 1)
+                locs['method'] = getattr(importlib.import_module(module), method)
+                outhash = bb.utils.better_eval('method(path, sigfile, task, d)', locs)
+            else:
+                outhash = bb.utils.better_eval(self.method + '(path, sigfile, task, d)', locs)
+
+            try:
+                extra_data = {}
+
+                owner = d.getVar('SSTATE_HASHEQUIV_OWNER')
+                if owner:
+                    extra_data['owner'] = owner
+
+                if report_taskdata:
+                    sigfile.seek(0)
+
+                    extra_data['PN'] = d.getVar('PN')
+                    extra_data['PV'] = d.getVar('PV')
+                    extra_data['PR'] = d.getVar('PR')
+                    extra_data['task'] = task
+                    extra_data['outhash_siginfo'] = sigfile.read().decode('utf-8')
+
+                data = self.client().report_unihash(taskhash, self.method, outhash, unihash, extra_data)
+                new_unihash = data['unihash']
+
+                if new_unihash != unihash:
+                    bb.debug(1, 'Task %s unihash changed %s -> %s by server %s' % (taskhash, unihash, new_unihash, self.server))
+                    bb.event.fire(bb.runqueue.taskUniHashUpdate(fn + ':do_' + task, new_unihash), d)
+                else:
+                    bb.debug(1, 'Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server))
+            except hashserv.client.HashConnectionError as e:
+                bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
+        finally:
+            if sigfile:
+                sigfile.close()
+
+                sigfile_link_path = os.path.join(tempdir, sigfile_link)
+                bb.utils.remove(sigfile_link_path)
+
+                try:
+                    os.symlink(sigfile_name, sigfile_link_path)
+                except OSError:
+                    pass
+
+
+#
+# Dummy class used for bitbake-selftest
+#
+class SignatureGeneratorTestEquivHash(SignatureGeneratorUniHashMixIn, SignatureGeneratorBasicHash):
+    name = "TestEquivHash"
+    def init_rundepcheck(self, data):
+        super().init_rundepcheck(data)
+        self.server = data.getVar('BB_HASHSERVE')
+        self.method = "sstate_output_hash"
+
+
 def dump_this_task(outfile, d):
     import bb.parse
     fn = d.getVar("BB_FILENAME")
@@ -392,13 +596,13 @@ def list_inline_diff(oldlist, newlist, colors=None):
 
 def clean_basepath(a):
     mc = None
-    if a.startswith("multiconfig:"):
+    if a.startswith("mc:"):
         _, mc, a = a.split(":", 2)
     b = a.rsplit("/", 2)[1] + '/' + a.rsplit("/", 2)[2]
     if a.startswith("virtual:"):
         b = b + ":" + a.rsplit(":", 1)[0]
     if mc:
-        b = b + ":multiconfig:" + mc
+        b = b + ":mc:" + mc
     return b
 
 def clean_basepaths(a):
@@ -623,6 +827,10 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
     a_taint = a_data.get('taint', None)
     b_taint = b_data.get('taint', None)
     if a_taint != b_taint:
+        if a_taint and a_taint.startswith('nostamp:'):
+            a_taint = a_taint.replace('nostamp:', 'nostamp(uuid4):')
+        if b_taint and b_taint.startswith('nostamp:'):
+            b_taint = b_taint.replace('nostamp:', 'nostamp(uuid4):')
         output.append(color_format("{color_title}Taint (by forced/invalidated task) changed{color_default} from %s to %s") % (a_taint, b_taint))
 
     return output
@@ -642,7 +850,7 @@ def calc_basehash(sigdata):
         if val is not None:
             basedata = basedata + str(val)
 
-    return hashlib.md5(basedata.encode("utf-8")).hexdigest()
+    return hashlib.sha256(basedata.encode("utf-8")).hexdigest()
 
 def calc_taskhash(sigdata):
     data = sigdata['basehash']
@@ -660,7 +868,7 @@ def calc_taskhash(sigdata):
         else:
             data = data + sigdata['taint']
 
-    return hashlib.md5(data.encode("utf-8")).hexdigest()
+    return hashlib.sha256(data.encode("utf-8")).hexdigest()
 
 
 def dump_sigfile(a):
@@ -695,7 +903,11 @@ def dump_sigfile(a):
             output.append("Hash for dependent task %s is %s" % (dep, a_data['runtaskhashes'][dep]))
 
     if 'taint' in a_data:
-        output.append("Tainted (by forced/invalidated task): %s" % a_data['taint'])
+        if a_data['taint'].startswith('nostamp:'):
+            msg = a_data['taint'].replace('nostamp:', 'nostamp(uuid4):')
+        else:
+            msg = a_data['taint']
+        output.append("Tainted (by forced/invalidated task): %s" % msg)
 
     if 'task' in a_data:
         computed_basehash = calc_basehash(a_data)

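Stripped to its essentials, the hash equivalence exchange that SignatureGeneratorUniHashMixIn performs through the hashserv client looks like the sketch below; the server address and hash values are placeholders:

    import hashserv

    client = hashserv.create_client("localhost:8686")      # placeholder address (BB_HASHSERVE)

    taskhash = "deadbeef" * 8                               # placeholder task hash
    outhash = "cafebabe" * 8                                # placeholder output hash

    # Ask whether an equivalent output has already been seen for this taskhash;
    # fall back to the taskhash itself so builds without a server still agree.
    unihash = client.get_unihash("sstate_output_hash", taskhash) or taskhash

    # After running the task, report its output hash so other builders can reuse it.
    data = client.report_unihash(taskhash, "sstate_output_hash", outhash, unihash, {})
    unihash = data["unihash"]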
+ 2 - 15
bitbake/lib/bb/taskdata.py

@@ -1,6 +1,3 @@
-#!/usr/bin/env python
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 """
 BitBake 'TaskData' implementation
 
@@ -10,18 +7,8 @@ Task data collection and handling
 
 # Copyright (C) 2006  Richard Purdie
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 import logging
 import re
@@ -93,7 +80,7 @@ class TaskData:
         def add_mcdepends(task):
             for dep in task_deps['mcdepends'][task].split():
                 if len(dep.split(':')) != 5:
-                    bb.msg.fatal("TaskData", "Error for %s:%s[%s], multiconfig dependency %s does not contain exactly four  ':' characters.\n Task '%s' should be specified in the form 'multiconfig:fromMC:toMC:packagename:task'" % (fn, task, 'mcdepends', dep, 'mcdepends'))
+                    bb.msg.fatal("TaskData", "Error for %s:%s[%s], multiconfig dependency %s does not contain exactly four  ':' characters.\n Task '%s' should be specified in the form 'mc:fromMC:toMC:packagename:task'" % (fn, task, 'mcdepends', dep, 'mcdepends'))
                 if dep not in self.mcdepends:
                     self.mcdepends.append(dep)
 

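The corrected message above also documents the expected shape of a multiconfig dependency: five colon-separated fields using the new mc: prefix. For example (the recipe and multiconfig names are made up):

    dep = "mc:qemux86:qemuarm:core-image-minimal:do_image"
    assert len(dep.split(':')) == 5    # mc : fromMC : toMC : packagename : task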
+ 8 - 14
bitbake/lib/bb/tests/codeparser.py

@@ -1,23 +1,10 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 #
 # BitBake Test for codeparser.py
 #
 # Copyright (C) 2010 Chris Larson
 # Copyright (C) 2012 Richard Purdie
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# SPDX-License-Identifier: GPL-2.0-only
 #
 
 import unittest
@@ -123,6 +110,13 @@ ${D}${libdir}/pkgconfig/*.pc
         self.parseExpression("sed -i -e 's:IP{:I${:g' $pc")
         self.assertExecs(set(["sed"]))
 
+    def test_parameter_expansion_modifiers(self):
+        # - and + are also valid modifiers for parameter expansion, but are
+        # valid characters in bitbake variable names, so are not included here
+        for i in ('=', ':-', ':=', '?', ':?', ':+', '#', '%', '##', '%%'):
+            name = "foo%sbar" % i
+            self.parseExpression("${%s}" % name)
+            self.assertNotIn(name, self.references)
 
     def test_until(self):
         self.parseExpression("until false; do echo true; done")

+ 1 - 14
bitbake/lib/bb/tests/cooker.py

@@ -1,20 +1,7 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 #
 # BitBake Tests for cooker.py
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# SPDX-License-Identifier: GPL-2.0-only
 #
 
 import unittest

+ 2 - 15
bitbake/lib/bb/tests/cow.py

@@ -1,22 +1,9 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 #
 # BitBake Tests for Copy-on-Write (cow.py)
 #
-# Copyright 2006 Holger Freyther <freyther@handhelds.org>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
 #
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# Copyright 2006 Holger Freyther <freyther@handhelds.org>
 #
 
 import unittest

+ 24 - 15
bitbake/lib/bb/tests/data.py

@@ -1,23 +1,10 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 #
 # BitBake Tests for the Data Store (data.py/data_smart.py)
 #
 # Copyright (C) 2010 Chris Larson
 # Copyright (C) 2012 Richard Purdie
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# SPDX-License-Identifier: GPL-2.0-only
 #
 
 import unittest
@@ -394,6 +381,28 @@ class TestOverrides(unittest.TestCase):
         self.d.setVar("OVERRIDES", "foo:bar:some_val")
         self.assertEqual(self.d.getVar("TEST"), " testvalue5")
 
+    def test_append_and_override_1(self):
+        self.d.setVar("TEST_append", "testvalue2")
+        self.d.setVar("TEST_bar", "testvalue3")
+        self.assertEqual(self.d.getVar("TEST"), "testvalue3testvalue2")
+
+    def test_append_and_override_2(self):
+        self.d.setVar("TEST_append_bar", "testvalue2")
+        self.assertEqual(self.d.getVar("TEST"), "testvaluetestvalue2")
+
+    def test_append_and_override_3(self):
+        self.d.setVar("TEST_bar_append", "testvalue2")
+        self.assertEqual(self.d.getVar("TEST"), "testvalue2")
+
+    # Test an override with _<numeric> in it based on a real world OE issue
+    def test_underscore_override(self):
+        self.d.setVar("TARGET_ARCH", "x86_64")
+        self.d.setVar("PN", "test-${TARGET_ARCH}")
+        self.d.setVar("VERSION", "1")
+        self.d.setVar("VERSION_pn-test-${TARGET_ARCH}", "2")
+        self.d.setVar("OVERRIDES", "pn-${PN}")
+        bb.data.expandKeys(self.d)
+        self.assertEqual(self.d.getVar("VERSION"), "2")
 
 class TestKeyExpansion(unittest.TestCase):
     def setUp(self):
@@ -470,7 +479,7 @@ class TaskHash(unittest.TestCase):
             tasklist, gendeps, lookupcache = bb.data.generate_dependencies(d)
             taskdeps, basehash = bb.data.generate_dependency_hash(tasklist, gendeps, lookupcache, set(), "somefile")
             bb.warn(str(lookupcache))
-            return basehash["somefile." + taskname]
+            return basehash["somefile:" + taskname]
 
         d = bb.data.init()
         d.setVar("__BBTASKS", ["mytask"])

+ 1 - 22
bitbake/lib/bb/tests/event.py

@@ -1,22 +1,9 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 #
 # BitBake Tests for the Event implementation (event.py)
 #
 # Copyright (C) 2017 Intel Corporation
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# SPDX-License-Identifier: GPL-2.0-only
 #
 
 import unittest
@@ -574,14 +561,6 @@ class EventClassesTest(unittest.TestCase):
         self.assertEqual(event.fn(1), callback(1))
         self.assertEqual(event.pid, EventClassesTest._worker_pid)
 
-    def test_StampUpdate(self):
-        targets = ["foo", "bar"]
-        stampfns = [lambda:"foobar"]
-        event = bb.event.StampUpdate(targets, stampfns)
-        self.assertEqual(event.targets, targets)
-        self.assertEqual(event.stampPrefix, stampfns)
-        self.assertEqual(event.pid, EventClassesTest._worker_pid)
-
     def test_BuildBase(self):
         """ Test base class for bitbake build events """
         name = "foo"

+ 286 - 22
bitbake/lib/bb/tests/fetch.py

@@ -1,22 +1,9 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 #
 # BitBake Tests for the Fetcher (fetch2/)
 #
 # Copyright (C) 2012 Richard Purdie
 #
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# SPDX-License-Identifier: GPL-2.0-only
 #
 
 import unittest
@@ -893,12 +880,201 @@ class FetcherNetworkTest(FetcherTest):
 
     @skipIfNoNetwork()
     def test_git_submodule(self):
-        fetcher = bb.fetch.Fetch(["gitsm://git.yoctoproject.org/git-submodule-test;rev=f12e57f2edf0aa534cf1616fa983d165a92b0842"], self.d)
+        # URL with ssh submodules
+        url = "gitsm://git.yoctoproject.org/git-submodule-test;branch=ssh-gitsm-tests;rev=049da4a6cb198d7c0302e9e8b243a1443cb809a7"
+        # Original URL (comment this if you have ssh access to git.yoctoproject.org)
+        url = "gitsm://git.yoctoproject.org/git-submodule-test;branch=master;rev=a2885dd7d25380d23627e7544b7bbb55014b16ee"
+        fetcher = bb.fetch.Fetch([url], self.d)
+        fetcher.download()
+        # Previous cwd has been deleted
+        os.chdir(os.path.dirname(self.unpackdir))
+        fetcher.unpack(self.unpackdir)
+
+        repo_path = os.path.join(self.tempdir, 'unpacked', 'git')
+        self.assertTrue(os.path.exists(repo_path), msg='Unpacked repository missing')
+        self.assertTrue(os.path.exists(os.path.join(repo_path, 'bitbake')), msg='bitbake submodule missing')
+        self.assertFalse(os.path.exists(os.path.join(repo_path, 'na')), msg='uninitialized submodule present')
+
+        # Only when we're running the extended test with a submodule's submodule can we check this.
+        if os.path.exists(os.path.join(repo_path, 'bitbake-gitsm-test1')):
+            self.assertTrue(os.path.exists(os.path.join(repo_path, 'bitbake-gitsm-test1', 'bitbake')), msg='submodule of submodule missing')
+
+    @skipIfNoNetwork()
+    def test_git_submodule_dbus_broker(self):
+        # The following external repositories have shown failures in fetch and unpack operations
+        # We want to avoid regressions!
+        url = "gitsm://github.com/bus1/dbus-broker;protocol=git;rev=fc874afa0992d0c75ec25acb43d344679f0ee7d2"
+        fetcher = bb.fetch.Fetch([url], self.d)
+        fetcher.download()
+        # Previous cwd has been deleted
+        os.chdir(os.path.dirname(self.unpackdir))
+        fetcher.unpack(self.unpackdir)
+
+        repo_path = os.path.join(self.tempdir, 'unpacked', 'git')
+        self.assertTrue(os.path.exists(os.path.join(repo_path, '.git/modules/subprojects/c-dvar/config')), msg='Missing submodule config "subprojects/c-dvar"')
+        self.assertTrue(os.path.exists(os.path.join(repo_path, '.git/modules/subprojects/c-list/config')), msg='Missing submodule config "subprojects/c-list"')
+        self.assertTrue(os.path.exists(os.path.join(repo_path, '.git/modules/subprojects/c-rbtree/config')), msg='Missing submodule config "subprojects/c-rbtree"')
+        self.assertTrue(os.path.exists(os.path.join(repo_path, '.git/modules/subprojects/c-sundry/config')), msg='Missing submodule config "subprojects/c-sundry"')
+        self.assertTrue(os.path.exists(os.path.join(repo_path, '.git/modules/subprojects/c-utf8/config')), msg='Missing submodule config "subprojects/c-utf8"')
+
+    @skipIfNoNetwork()
+    def test_git_submodule_CLI11(self):
+        url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=bd4dc911847d0cde7a6b41dfa626a85aab213baf"
+        fetcher = bb.fetch.Fetch([url], self.d)
+        fetcher.download()
+        # Previous cwd has been deleted
+        os.chdir(os.path.dirname(self.unpackdir))
+        fetcher.unpack(self.unpackdir)
+
+        repo_path = os.path.join(self.tempdir, 'unpacked', 'git')
+        self.assertTrue(os.path.exists(os.path.join(repo_path, '.git/modules/extern/googletest/config')), msg='Missing submodule config "extern/googletest"')
+        self.assertTrue(os.path.exists(os.path.join(repo_path, '.git/modules/extern/json/config')), msg='Missing submodule config "extern/json"')
+        self.assertTrue(os.path.exists(os.path.join(repo_path, '.git/modules/extern/sanitizers/config')), msg='Missing submodule config "extern/sanitizers"')
+
+    @skipIfNoNetwork()
+    def test_git_submodule_update_CLI11(self):
+        """ Prevent regression on update detection not finding missing submodule, or modules without needed commits """
+        url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=cf6a99fa69aaefe477cc52e3ef4a7d2d7fa40714"
+        fetcher = bb.fetch.Fetch([url], self.d)
+        fetcher.download()
+
+        # CLI11 that pulls in a newer nlohmann-json
+        url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=49ac989a9527ee9bb496de9ded7b4872c2e0e5ca"
+        fetcher = bb.fetch.Fetch([url], self.d)
+        fetcher.download()
+        # Previous cwd has been deleted
+        os.chdir(os.path.dirname(self.unpackdir))
+        fetcher.unpack(self.unpackdir)
+
+        repo_path = os.path.join(self.tempdir, 'unpacked', 'git')
+        self.assertTrue(os.path.exists(os.path.join(repo_path, '.git/modules/extern/googletest/config')), msg='Missing submodule config "extern/googletest"')
+        self.assertTrue(os.path.exists(os.path.join(repo_path, '.git/modules/extern/json/config')), msg='Missing submodule config "extern/json"')
+        self.assertTrue(os.path.exists(os.path.join(repo_path, '.git/modules/extern/sanitizers/config')), msg='Missing submodule config "extern/sanitizers"')
+
+    @skipIfNoNetwork()
+    def test_git_submodule_aktualizr(self):
+        url = "gitsm://github.com/advancedtelematic/aktualizr;branch=master;protocol=git;rev=d00d1a04cc2366d1a5f143b84b9f507f8bd32c44"
+        fetcher = bb.fetch.Fetch([url], self.d)
         fetcher.download()
         # Previous cwd has been deleted
         os.chdir(os.path.dirname(self.unpackdir))
         fetcher.unpack(self.unpackdir)
 
+        repo_path = os.path.join(self.tempdir, 'unpacked', 'git')
+        self.assertTrue(os.path.exists(os.path.join(repo_path, '.git/modules/partial/extern/isotp-c/config')), msg='Missing submodule config "partial/extern/isotp-c/config"')
+        self.assertTrue(os.path.exists(os.path.join(repo_path, '.git/modules/partial/extern/isotp-c/modules/deps/bitfield-c/config')), msg='Missing submodule config "partial/extern/isotp-c/modules/deps/bitfield-c/config"')
+        self.assertTrue(os.path.exists(os.path.join(repo_path, 'partial/extern/isotp-c/deps/bitfield-c/.git')), msg="Submodule of submodule isotp-c did not unpack properly")
+        self.assertTrue(os.path.exists(os.path.join(repo_path, '.git/modules/tests/tuf-test-vectors/config')), msg='Missing submodule config "tests/tuf-test-vectors/config"')
+        self.assertTrue(os.path.exists(os.path.join(repo_path, '.git/modules/third_party/googletest/config')), msg='Missing submodule config "third_party/googletest/config"')
+        self.assertTrue(os.path.exists(os.path.join(repo_path, '.git/modules/third_party/HdrHistogram_c/config')), msg='Missing submodule config "third_party/HdrHistogram_c/config"')
+
+    @skipIfNoNetwork()
+    def test_git_submodule_iotedge(self):
+        """ Prevent regression on deeply nested submodules not being checked out properly, even though they were fetched. """
+
+        # This repository also has submodules where the module (name), path and url do not align
+        url = "gitsm://github.com/azure/iotedge.git;protocol=git;rev=d76e0316c6f324345d77c48a83ce836d09392699"
+        fetcher = bb.fetch.Fetch([url], self.d)
+        fetcher.download()
+        # Previous cwd has been deleted
+        os.chdir(os.path.dirname(self.unpackdir))
+        fetcher.unpack(self.unpackdir)
+
+        repo_path = os.path.join(self.tempdir, 'unpacked', 'git')
+
+        self.assertTrue(os.path.exists(os.path.join(repo_path, 'edgelet/hsm-sys/azure-iot-hsm-c/deps/c-shared/README.md')), msg='Missing submodule checkout')
+        self.assertTrue(os.path.exists(os.path.join(repo_path, 'edgelet/hsm-sys/azure-iot-hsm-c/deps/c-shared/testtools/ctest/README.md')), msg='Missing submodule checkout')
+        self.assertTrue(os.path.exists(os.path.join(repo_path, 'edgelet/hsm-sys/azure-iot-hsm-c/deps/c-shared/testtools/testrunner/readme.md')), msg='Missing submodule checkout')
+        self.assertTrue(os.path.exists(os.path.join(repo_path, 'edgelet/hsm-sys/azure-iot-hsm-c/deps/c-shared/testtools/umock-c/readme.md')), msg='Missing submodule checkout')
+        self.assertTrue(os.path.exists(os.path.join(repo_path, 'edgelet/hsm-sys/azure-iot-hsm-c/deps/c-shared/testtools/umock-c/deps/ctest/README.md')), msg='Missing submodule checkout')
+        self.assertTrue(os.path.exists(os.path.join(repo_path, 'edgelet/hsm-sys/azure-iot-hsm-c/deps/c-shared/testtools/umock-c/deps/testrunner/readme.md')), msg='Missing submodule checkout')
+        self.assertTrue(os.path.exists(os.path.join(repo_path, 'edgelet/hsm-sys/azure-iot-hsm-c/deps/utpm/README.md')), msg='Missing submodule checkout')
+        self.assertTrue(os.path.exists(os.path.join(repo_path, 'edgelet/hsm-sys/azure-iot-hsm-c/deps/utpm/deps/c-utility/README.md')), msg='Missing submodule checkout')
+        self.assertTrue(os.path.exists(os.path.join(repo_path, 'edgelet/hsm-sys/azure-iot-hsm-c/deps/utpm/deps/c-utility/testtools/ctest/README.md')), msg='Missing submodule checkout')
+        self.assertTrue(os.path.exists(os.path.join(repo_path, 'edgelet/hsm-sys/azure-iot-hsm-c/deps/utpm/deps/c-utility/testtools/testrunner/readme.md')), msg='Missing submodule checkout')
+        self.assertTrue(os.path.exists(os.path.join(repo_path, 'edgelet/hsm-sys/azure-iot-hsm-c/deps/utpm/deps/c-utility/testtools/umock-c/readme.md')), msg='Missing submodule checkout')
+        self.assertTrue(os.path.exists(os.path.join(repo_path, 'edgelet/hsm-sys/azure-iot-hsm-c/deps/utpm/deps/c-utility/testtools/umock-c/deps/ctest/README.md')), msg='Missing submodule checkout')
+        self.assertTrue(os.path.exists(os.path.join(repo_path, 'edgelet/hsm-sys/azure-iot-hsm-c/deps/utpm/deps/c-utility/testtools/umock-c/deps/testrunner/readme.md')), msg='Missing submodule checkout')
+
+class SVNTest(FetcherTest):
+    def skipIfNoSvn():
+        import shutil
+        if not shutil.which("svn"):
+            return unittest.skip("svn not installed,  tests being skipped")
+
+        if not shutil.which("svnadmin"):
+            return unittest.skip("svnadmin not installed,  tests being skipped")
+
+        return lambda f: f
+
+    @skipIfNoSvn()
+    def setUp(self):
+        """ Create a local repository """
+
+        super(SVNTest, self).setUp()
+
+        # Create something we can fetch
+        src_dir = tempfile.mkdtemp(dir=self.tempdir,
+                                   prefix='svnfetch_srcdir_')
+        src_dir = os.path.abspath(src_dir)
+        bb.process.run("echo readme > README.md", cwd=src_dir)
+
+        # Store it in a local SVN repository
+        repo_dir = tempfile.mkdtemp(dir=self.tempdir,
+                                   prefix='svnfetch_localrepo_')
+        repo_dir = os.path.abspath(repo_dir)
+        bb.process.run("svnadmin create project", cwd=repo_dir)
+
+        self.repo_url = "file://%s/project" % repo_dir
+        bb.process.run("svn import --non-interactive -m 'Initial import' %s %s/trunk" % (src_dir, self.repo_url),
+                       cwd=repo_dir)
+
+        bb.process.run("svn co %s svnfetch_co" % self.repo_url, cwd=self.tempdir)
+        # Github will emulate SVN.  Use this to check if we're downloading...
+        bb.process.run("svn propset svn:externals 'bitbake http://github.com/openembedded/bitbake' .",
+                       cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk'))
+        bb.process.run("svn commit --non-interactive -m 'Add external'",
+                       cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk'))
+
+        self.src_dir = src_dir
+        self.repo_dir = repo_dir
+
+    @skipIfNoSvn()
+    def tearDown(self):
+        os.chdir(self.origdir)
+        if os.environ.get("BB_TMPDIR_NOCLEAN") == "yes":
+            print("Not cleaning up %s. Please remove manually." % self.tempdir)
+        else:
+            bb.utils.prunedir(self.tempdir)
+
+    @skipIfNoSvn()
+    @skipIfNoNetwork()
+    def test_noexternal_svn(self):
+        # Always match the rev count from setUp (currently rev 2)
+        url = "svn://%s;module=trunk;protocol=file;rev=2" % self.repo_url.replace('file://', '')
+        fetcher = bb.fetch.Fetch([url], self.d)
+        fetcher.download()
+        os.chdir(os.path.dirname(self.unpackdir))
+        fetcher.unpack(self.unpackdir)
+
+        self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk')), msg="Missing trunk")
+        self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk', 'README.md')), msg="Missing contents")
+        self.assertFalse(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/trunk')), msg="External dir should NOT exist")
+        self.assertFalse(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/trunk', 'README')), msg="External README should NOT exist")
+
+    @skipIfNoSvn()
+    def test_external_svn(self):
+        # Always match the rev count from setUp (currently rev 2)
+        url = "svn://%s;module=trunk;protocol=file;externals=allowed;rev=2" % self.repo_url.replace('file://', '')
+        fetcher = bb.fetch.Fetch([url], self.d)
+        fetcher.download()
+        os.chdir(os.path.dirname(self.unpackdir))
+        fetcher.unpack(self.unpackdir)
+
+        self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk')), msg="Missing trunk")
+        self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk', 'README.md')), msg="Missing contents")
+        self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/trunk')), msg="External dir should exist")
+        self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/trunk', 'README')), msg="External README should exist")
 
 class TrustedNetworksTest(FetcherTest):
     def test_trusted_network(self):
@@ -1024,8 +1200,8 @@ class FetchLatestVersionTest(FetcherTest):
         # packages with valid UPSTREAM_CHECK_URI and UPSTREAM_CHECK_REGEX
         ("cups", "http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2", "https://github.com/apple/cups/releases", "(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz")
             : "2.0.0",
-        ("db", "http://download.oracle.com/berkeley-db/db-5.3.21.tar.gz", "http://www.oracle.com/technetwork/products/berkeleydb/downloads/index-082944.html", "http://download.oracle.com/otn/berkeley-db/(?P<name>db-)(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz")
-            : "6.1.19",
+        ("db", "http://download.oracle.com/berkeley-db/db-5.3.21.tar.gz", "http://ftp.debian.org/debian/pool/main/d/db5.3/", "(?P<name>db5\.3_)(?P<pver>\d+(\.\d+)+).+\.orig\.tar\.xz")
+            : "5.3.10",
     }
 
     @skipIfNoNetwork()
@@ -1280,7 +1456,7 @@ class GitShallowTest(FetcherTest):
 
     def fetch(self, uri=None):
         if uri is None:
-            uris = self.d.getVar('SRC_URI', True).split()
+            uris = self.d.getVar('SRC_URI').split()
             uri = uris[0]
             d = self.d
         else:
@@ -1312,6 +1488,7 @@ class GitShallowTest(FetcherTest):
         # fetch and unpack, from the shallow tarball
         bb.utils.remove(self.gitdir, recurse=True)
         bb.utils.remove(ud.clonedir, recurse=True)
+        bb.utils.remove(ud.clonedir.replace('gitsource', 'gitsubmodule'), recurse=True)
 
         # confirm that the unpacked repo is used when no git clone or git
         # mirror tarball is available
@@ -1338,7 +1515,7 @@ class GitShallowTest(FetcherTest):
 
         srcrev = self.git('rev-parse HEAD', cwd=self.srcdir).strip()
         self.d.setVar('SRCREV', srcrev)
-        uri = self.d.getVar('SRC_URI', True).split()[0]
+        uri = self.d.getVar('SRC_URI').split()[0]
         uri = '%s;nobranch=1;bare=1' % uri
 
         self.fetch_shallow(uri)
@@ -1466,6 +1643,7 @@ class GitShallowTest(FetcherTest):
         self.git('config --add remote.origin.url "%s"' % smdir, cwd=smdir)
         self.git('config --add remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"', cwd=smdir)
         self.add_empty_file('asub', cwd=smdir)
+        self.add_empty_file('bsub', cwd=smdir)
 
         self.git('submodule init', cwd=self.srcdir)
         self.git('submodule add file://%s' % smdir, cwd=self.srcdir)
@@ -1475,10 +1653,16 @@ class GitShallowTest(FetcherTest):
         uri = 'gitsm://%s;protocol=file;subdir=${S}' % self.srcdir
         fetcher, ud = self.fetch_shallow(uri)
 
+        # Verify the main repository is shallow
         self.assertRevCount(1)
-        assert './.git/modules/' in bb.process.run('tar -tzf %s' % os.path.join(self.dldir, ud.mirrortarballs[0]))[0]
+
+        # Verify the gitsubmodule directory is present
         assert os.listdir(os.path.join(self.gitdir, 'gitsubmodule'))
 
+        # Verify the submodule is also shallow
+        self.assertRevCount(1, cwd=os.path.join(self.gitdir, 'gitsubmodule'))
+
+
     if any(os.path.exists(os.path.join(p, 'git-annex')) for p in os.environ.get('PATH').split(':')):
         def test_shallow_annex(self):
             self.add_empty_file('a')
@@ -1510,7 +1694,7 @@ class GitShallowTest(FetcherTest):
         self.add_empty_file('f')
         self.assertRevCount(7, cwd=self.srcdir)
 
-        uri = self.d.getVar('SRC_URI', True).split()[0]
+        uri = self.d.getVar('SRC_URI').split()[0]
         uri = '%s;branch=master,a_branch;name=master,a_branch' % uri
 
         self.d.setVar('BB_GIT_SHALLOW_DEPTH', '0')
@@ -1536,7 +1720,7 @@ class GitShallowTest(FetcherTest):
         self.add_empty_file('f')
         self.assertRevCount(7, cwd=self.srcdir)
 
-        uri = self.d.getVar('SRC_URI', True).split()[0]
+        uri = self.d.getVar('SRC_URI').split()[0]
         uri = '%s;branch=master,a_branch;name=master,a_branch' % uri
 
         self.d.setVar('BB_GIT_SHALLOW_DEPTH', '0')
@@ -1724,3 +1908,83 @@ class GitShallowTest(FetcherTest):
 
         dir = os.listdir(self.unpackdir + "/git/")
         self.assertIn("fstests.doap", dir)
+
+class GitLfsTest(FetcherTest):
+    def setUp(self):
+        FetcherTest.setUp(self)
+
+        self.gitdir = os.path.join(self.tempdir, 'git')
+        self.srcdir = os.path.join(self.tempdir, 'gitsource')
+        
+        self.d.setVar('WORKDIR', self.tempdir)
+        self.d.setVar('S', self.gitdir)
+        self.d.delVar('PREMIRRORS')
+        self.d.delVar('MIRRORS')
+
+        self.d.setVar('SRCREV', '${AUTOREV}')
+        self.d.setVar('AUTOREV', '${@bb.fetch2.get_autorev(d)}')
+
+        bb.utils.mkdirhier(self.srcdir)
+        self.git('init', cwd=self.srcdir)
+        with open(os.path.join(self.srcdir, '.gitattributes'), 'wt') as attrs:
+            attrs.write('*.mp3 filter=lfs -text')
+        self.git(['add', '.gitattributes'], cwd=self.srcdir)
+        self.git(['commit', '-m', "attributes", '.gitattributes'], cwd=self.srcdir)
+
+    def git(self, cmd, cwd=None):
+        if isinstance(cmd, str):
+            cmd = 'git ' + cmd
+        else:
+            cmd = ['git'] + cmd
+        if cwd is None:
+            cwd = self.gitdir
+        return bb.process.run(cmd, cwd=cwd)[0]
+
+    def fetch(self, uri=None):
+        uris = self.d.getVar('SRC_URI').split()
+        uri = uris[0]
+        d = self.d
+
+        fetcher = bb.fetch2.Fetch(uris, d)
+        fetcher.download()
+        ud = fetcher.ud[uri]
+        return fetcher, ud
+
+    def test_lfs_enabled(self):
+        import shutil
+
+        uri = 'git://%s;protocol=file;subdir=${S};lfs=1' % self.srcdir
+        self.d.setVar('SRC_URI', uri)
+
+        fetcher, ud = self.fetch()
+        self.assertIsNotNone(ud.method._find_git_lfs)
+
+        # If git-lfs can be found, the unpack should be successful
+        ud.method._find_git_lfs = lambda d: True
+        shutil.rmtree(self.gitdir, ignore_errors=True)
+        fetcher.unpack(self.d.getVar('WORKDIR'))
+
+        # If git-lfs cannot be found, the unpack should throw an error
+        with self.assertRaises(bb.fetch2.FetchError):
+            ud.method._find_git_lfs = lambda d: False
+            shutil.rmtree(self.gitdir, ignore_errors=True)
+            fetcher.unpack(self.d.getVar('WORKDIR'))
+
+    def test_lfs_disabled(self):
+        import shutil
+
+        uri = 'git://%s;protocol=file;subdir=${S};lfs=0' % self.srcdir
+        self.d.setVar('SRC_URI', uri)
+
+        fetcher, ud = self.fetch()
+        self.assertIsNotNone(ud.method._find_git_lfs)
+
+        # If git-lfs can be found, the unpack should be successful
+        ud.method._find_git_lfs = lambda d: True
+        shutil.rmtree(self.gitdir, ignore_errors=True)
+        fetcher.unpack(self.d.getVar('WORKDIR'))
+
+        # If git-lfs cannot be found, the unpack should be successful
+        ud.method._find_git_lfs = lambda d: False
+        shutil.rmtree(self.gitdir, ignore_errors=True)
+        fetcher.unpack(self.d.getVar('WORKDIR'))

Some files were not shown because too many files changed in this diff