npmsw.py 8.4 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255
  1. # Copyright (C) 2020 Savoir-Faire Linux
  2. #
  3. # SPDX-License-Identifier: GPL-2.0-only
  4. #
  5. """
  6. BitBake 'Fetch' npm shrinkwrap implementation
7. The npmsw fetcher supports SRC_URI entries with the format:
  8. SRC_URI = "npmsw://some.registry.url;OptionA=xxx;OptionB=xxx;..."
  9. Supported SRC_URI options are:
  10. - dev
  11. Set to 1 to also install devDependencies.
  12. - destsuffix
  13. Specifies the directory to use to unpack the dependencies (default: ${S}).
  14. """
  15. import json
  16. import os
  17. import re
  18. import bb
  19. from bb.fetch2 import Fetch
  20. from bb.fetch2 import FetchMethod
  21. from bb.fetch2 import ParameterError
  22. from bb.fetch2 import URI
  23. from bb.fetch2.npm import npm_integrity
  24. from bb.fetch2.npm import npm_localfile
  25. from bb.fetch2.npm import npm_unpack
  26. from bb.utils import is_semver
  27. def foreach_dependencies(shrinkwrap, callback=None, dev=False):
  28. """
  29. Run a callback for each dependencies of a shrinkwrap file.
  30. The callback is using the format:
  31. callback(name, params, deptree)
  32. with:
  33. name = the package name (string)
  34. params = the package parameters (dictionary)
  35. deptree = the package dependency tree (array of strings)
  36. """
  37. def _walk_deps(deps, deptree):
  38. for name in deps:
  39. subtree = [*deptree, name]
  40. _walk_deps(deps[name].get("dependencies", {}), subtree)
  41. if callback is not None:
  42. if deps[name].get("dev", False) and not dev:
  43. continue
  44. elif deps[name].get("bundled", False):
  45. continue
  46. callback(name, deps[name], subtree)
  47. _walk_deps(shrinkwrap.get("dependencies", {}), [])
  48. class NpmShrinkWrap(FetchMethod):
  49. """Class to fetch all package from a shrinkwrap file"""
  50. def supports(self, ud, d):
  51. """Check if a given url can be fetched with npmsw"""
  52. return ud.type in ["npmsw"]
  53. def urldata_init(self, ud, d):
  54. """Init npmsw specific variables within url data"""
  55. # Get the 'shrinkwrap' parameter
  56. ud.shrinkwrap_file = re.sub(r"^npmsw://", "", ud.url.split(";")[0])
  57. # Get the 'dev' parameter
  58. ud.dev = bb.utils.to_boolean(ud.parm.get("dev"), False)
  59. # Resolve the dependencies
  60. ud.deps = []
  61. def _resolve_dependency(name, params, deptree):
  62. url = None
  63. localpath = None
  64. extrapaths = []
  65. destsubdirs = [os.path.join("node_modules", dep) for dep in deptree]
  66. destsuffix = os.path.join(*destsubdirs)
  67. integrity = params.get("integrity", None)
  68. resolved = params.get("resolved", None)
  69. version = params.get("version", None)
  70. # Handle registry sources
  71. if is_semver(version) and resolved and integrity:
  72. localfile = npm_localfile(name, version)
  73. uri = URI(resolved)
  74. uri.params["downloadfilename"] = localfile
  75. checksum_name, checksum_expected = npm_integrity(integrity)
  76. uri.params[checksum_name] = checksum_expected
  77. url = str(uri)
  78. localpath = os.path.join(d.getVar("DL_DIR"), localfile)
  79. # Create a resolve file to mimic the npm fetcher and allow
  80. # re-usability of the downloaded file.
  81. resolvefile = localpath + ".resolved"
  82. bb.utils.mkdirhier(os.path.dirname(resolvefile))
  83. with open(resolvefile, "w") as f:
  84. f.write(url)
  85. extrapaths.append(resolvefile)
  86. # Handle http tarball sources
  87. elif version.startswith("http") and integrity:
  88. localfile = os.path.join("npm2", os.path.basename(version))
  89. uri = URI(version)
  90. uri.params["downloadfilename"] = localfile
  91. checksum_name, checksum_expected = npm_integrity(integrity)
  92. uri.params[checksum_name] = checksum_expected
  93. url = str(uri)
  94. localpath = os.path.join(d.getVar("DL_DIR"), localfile)
  95. # Handle git sources
  96. elif version.startswith("git"):
  97. regex = re.compile(r"""
  98. ^
  99. git\+
  100. (?P<protocol>[a-z]+)
  101. ://
  102. (?P<url>[^#]+)
  103. \#
  104. (?P<rev>[0-9a-f]+)
  105. $
  106. """, re.VERBOSE)
  107. match = regex.match(version)
  108. if not match:
  109. raise ParameterError("Invalid git url: %s" % version, ud.url)
  110. groups = match.groupdict()
  111. uri = URI("git://" + str(groups["url"]))
  112. uri.params["protocol"] = str(groups["protocol"])
  113. uri.params["rev"] = str(groups["rev"])
  114. uri.params["destsuffix"] = destsuffix
  115. url = str(uri)
  116. # local tarball sources and local link sources are unsupported
  117. else:
  118. raise ParameterError("Unsupported dependency: %s" % name, ud.url)
  119. ud.deps.append({
  120. "url": url,
  121. "localpath": localpath,
  122. "extrapaths": extrapaths,
  123. "destsuffix": destsuffix,
  124. })
  125. try:
  126. with open(ud.shrinkwrap_file, "r") as f:
  127. shrinkwrap = json.load(f)
  128. except Exception as e:
  129. raise ParameterError("Invalid shrinkwrap file: %s" % str(e), ud.url)
  130. foreach_dependencies(shrinkwrap, _resolve_dependency, ud.dev)
  131. # Avoid conflicts between the environment data and:
  132. # - the proxy url revision
  133. # - the proxy url checksum
  134. data = bb.data.createCopy(d)
  135. data.delVar("SRCREV")
  136. data.delVarFlags("SRC_URI")
  137. # This fetcher resolves multiple URIs from a shrinkwrap file and then
  138. # forwards it to a proxy fetcher. The management of the donestamp file,
  139. # the lockfile and the checksums are forwarded to the proxy fetcher.
  140. ud.proxy = Fetch([dep["url"] for dep in ud.deps], data)
  141. ud.needdonestamp = False
  142. @staticmethod
  143. def _foreach_proxy_method(ud, handle):
  144. returns = []
  145. for proxy_url in ud.proxy.urls:
  146. proxy_ud = ud.proxy.ud[proxy_url]
  147. proxy_d = ud.proxy.d
  148. proxy_ud.setup_localpath(proxy_d)
  149. returns.append(handle(proxy_ud.method, proxy_ud, proxy_d))
  150. return returns
  151. def verify_donestamp(self, ud, d):
  152. """Verify the donestamp file"""
  153. def _handle(m, ud, d):
  154. return m.verify_donestamp(ud, d)
  155. return all(self._foreach_proxy_method(ud, _handle))
  156. def update_donestamp(self, ud, d):
  157. """Update the donestamp file"""
  158. def _handle(m, ud, d):
  159. m.update_donestamp(ud, d)
  160. self._foreach_proxy_method(ud, _handle)
  161. def need_update(self, ud, d):
  162. """Force a fetch, even if localpath exists ?"""
  163. def _handle(m, ud, d):
  164. return m.need_update(ud, d)
  165. return all(self._foreach_proxy_method(ud, _handle))
  166. def try_mirrors(self, fetch, ud, d, mirrors):
  167. """Try to use a mirror"""
  168. def _handle(m, ud, d):
  169. return m.try_mirrors(fetch, ud, d, mirrors)
  170. return all(self._foreach_proxy_method(ud, _handle))
  171. def download(self, ud, d):
  172. """Fetch url"""
  173. ud.proxy.download()
  174. def unpack(self, ud, rootdir, d):
  175. """Unpack the downloaded dependencies"""
  176. destdir = d.getVar("S")
  177. destsuffix = ud.parm.get("destsuffix")
  178. if destsuffix:
  179. destdir = os.path.join(rootdir, destsuffix)
  180. bb.utils.mkdirhier(destdir)
  181. bb.utils.copyfile(ud.shrinkwrap_file,
  182. os.path.join(destdir, "npm-shrinkwrap.json"))
  183. auto = [dep["url"] for dep in ud.deps if not dep["localpath"]]
  184. manual = [dep for dep in ud.deps if dep["localpath"]]
  185. if auto:
  186. ud.proxy.unpack(destdir, auto)
  187. for dep in manual:
  188. depdestdir = os.path.join(destdir, dep["destsuffix"])
  189. npm_unpack(dep["localpath"], depdestdir, d)
  190. def clean(self, ud, d):
  191. """Clean any existing full or partial download"""
  192. ud.proxy.clean()
  193. # Clean extra files
  194. for dep in ud.deps:
  195. for path in dep["extrapaths"]:
  196. bb.utils.remove(path)
  197. def done(self, ud, d):
  198. """Is the download done ?"""
  199. def _handle(m, ud, d):
  200. return m.done(ud, d)
  201. return all(self._foreach_proxy_method(ud, _handle))