path: root/bitbake/lib/bb/fetch2
author     Jean-Marie LEMETAYER <jean-marie.lemetayer@savoirfairelinux.com>  2020-01-24 18:08:13 +0100
committer  Richard Purdie <richard.purdie@linuxfoundation.org>  2020-01-27 16:48:10 +0000
commit     e0bd972ba7eaa5f3b0f5d93bf579cd79290d6601 (patch)
tree       7d5686799ea1b8740c0da09b7c6279e5c3e3a2c9 /bitbake/lib/bb/fetch2
parent     8b9505f1d4b139a71df3c184afa99bfd5bd9bc0b (diff)
download   poky-e0bd972ba7eaa5f3b0f5d93bf579cd79290d6601.tar.gz
bitbake: fetch2: add the npmsw fetcher
This commit adds a new npmsw fetcher that fetches every npm dependency
described in an npm shrinkwrap file:
https://docs.npmjs.com/files/shrinkwrap.json.html

The main package must be fetched separately:

SRC_URI = "npm://registry.url;package=foobar;version=1.0.0 \
           npmsw://${THISDIR}/npm-shrinkwrap.json"

Since a separation has been created between the package and its
dependencies, the package can also be fetched with a non-npm fetcher
without impacting the general behavior:

SRC_URI = "git://github.com/foo/bar.git;protocol=https \
           npmsw://${THISDIR}/npm-shrinkwrap.json"

(Bitbake rev: f5223be54450bf20e0bfbd53b372a7748a44b475)

Signed-off-by: Jean-Marie LEMETAYER <jean-marie.lemetayer@savoirfairelinux.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Diffstat (limited to 'bitbake/lib/bb/fetch2')
-rw-r--r--  bitbake/lib/bb/fetch2/__init__.py  |   2
-rw-r--r--  bitbake/lib/bb/fetch2/npmsw.py     | 255
2 files changed, 257 insertions, 0 deletions
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index 4fe042739d..1b7f573642 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -1894,6 +1894,7 @@ from . import osc
 from . import repo
 from . import clearcase
 from . import npm
+from . import npmsw

 methods.append(local.Local())
 methods.append(wget.Wget())
@@ -1912,3 +1913,4 @@ methods.append(osc.Osc())
 methods.append(repo.Repo())
 methods.append(clearcase.ClearCase())
 methods.append(npm.Npm())
+methods.append(npmsw.NpmShrinkWrap())
diff --git a/bitbake/lib/bb/fetch2/npmsw.py b/bitbake/lib/bb/fetch2/npmsw.py
new file mode 100644
index 0000000000..0c3511d8ab
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/npmsw.py
@@ -0,0 +1,255 @@
# Copyright (C) 2020 Savoir-Faire Linux
#
# SPDX-License-Identifier: GPL-2.0-only
#
"""
BitBake 'Fetch' npm shrinkwrap implementation

The npmsw fetcher supports SRC_URIs with the format:
SRC_URI = "npmsw://some.shrinkwrap.file;OptionA=xxx;OptionB=xxx;..."

Supported SRC_URI options are:

- dev
    Set to 1 to also install devDependencies.

- destsuffix
    Specifies the directory to use to unpack the dependencies (default: ${S}).
"""
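
# Example usage (illustrative only, not taken from this commit):
#
#   SRC_URI = "npmsw://${THISDIR}/npm-shrinkwrap.json;dev=1"
#
# fetches every (dev)dependency listed in the shrinkwrap file next to the
# recipe, while the main package is fetched by its own SRC_URI entry.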

import json
import os
import re
import bb
from bb.fetch2 import Fetch
from bb.fetch2 import FetchMethod
from bb.fetch2 import ParameterError
from bb.fetch2 import URI
from bb.fetch2.npm import npm_integrity
from bb.fetch2.npm import npm_localfile
from bb.fetch2.npm import npm_unpack
from bb.utils import is_semver

def foreach_dependencies(shrinkwrap, callback=None, dev=False):
    """
    Run a callback for each dependency of a shrinkwrap file.
    The callback uses the format:
        callback(name, params, deptree)
    with:
        name = the package name (string)
        params = the package parameters (dictionary)
        deptree = the package dependency tree (array of strings)
    """
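    # Example (illustrative): collect the destination path of every
    # non-development dependency:
    #
    #   paths = []
    #   foreach_dependencies(shrinkwrap,
    #                        lambda name, params, deptree: paths.append("/".join(deptree)))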
    def _walk_deps(deps, deptree):
        for name in deps:
            subtree = [*deptree, name]
            _walk_deps(deps[name].get("dependencies", {}), subtree)
            if callback is not None:
                if deps[name].get("dev", False) and not dev:
                    continue
                elif deps[name].get("bundled", False):
                    continue
                callback(name, deps[name], subtree)

    _walk_deps(shrinkwrap.get("dependencies", {}), [])

class NpmShrinkWrap(FetchMethod):
    """Class to fetch all packages from a shrinkwrap file"""

    def supports(self, ud, d):
        """Check if a given url can be fetched with npmsw"""
        return ud.type in ["npmsw"]

    def urldata_init(self, ud, d):
        """Init npmsw specific variables within url data"""

        # Get the 'shrinkwrap' parameter
        ud.shrinkwrap_file = re.sub(r"^npmsw://", "", ud.url.split(";")[0])

        # Get the 'dev' parameter
        ud.dev = bb.utils.to_boolean(ud.parm.get("dev"), False)

        # Resolve the dependencies
        ud.deps = []

        def _resolve_dependency(name, params, deptree):
            url = None
            localpath = None
            extrapaths = []
            destsubdirs = [os.path.join("node_modules", dep) for dep in deptree]
            destsuffix = os.path.join(*destsubdirs)
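            # e.g. a dependency tree ["foo", "bar"] unpacks to
            # "node_modules/foo/node_modules/bar"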

            integrity = params.get("integrity", None)
            resolved = params.get("resolved", None)
            version = params.get("version", None)

            # Handle registry sources
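            # (i.e. entries with a semver "version", a "resolved" registry URL
            # and an "integrity" checksum)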
            if is_semver(version) and resolved and integrity:
                localfile = npm_localfile(name, version)

                uri = URI(resolved)
                uri.params["downloadfilename"] = localfile

                checksum_name, checksum_expected = npm_integrity(integrity)
                uri.params[checksum_name] = checksum_expected

                url = str(uri)

                localpath = os.path.join(d.getVar("DL_DIR"), localfile)

                # Create a resolve file to mimic the npm fetcher and allow
                # re-usability of the downloaded file.
                resolvefile = localpath + ".resolved"

                bb.utils.mkdirhier(os.path.dirname(resolvefile))
                with open(resolvefile, "w") as f:
                    f.write(url)

                extrapaths.append(resolvefile)

            # Handle http tarball sources
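            # (e.g. an entry whose "version" is a plain tarball URL such as
            # "https://example.com/foo-1.0.0.tgz" - illustrative URL only)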
            elif version.startswith("http") and integrity:
                localfile = os.path.join("npm2", os.path.basename(version))

                uri = URI(version)
                uri.params["downloadfilename"] = localfile

                checksum_name, checksum_expected = npm_integrity(integrity)
                uri.params[checksum_name] = checksum_expected

                url = str(uri)

                localpath = os.path.join(d.getVar("DL_DIR"), localfile)

            # Handle git sources
            elif version.startswith("git"):
                regex = re.compile(r"""
                    ^
                    git\+
                    (?P<protocol>[a-z]+)
                    ://
                    (?P<url>[^#]+)
                    \#
                    (?P<rev>[0-9a-f]+)
                    $
                    """, re.VERBOSE)
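                # A matching version looks like (illustrative):
                #   "git+https://github.com/foo/bar.git#<hex revision>"
                # giving protocol "https", url "github.com/foo/bar.git" and
                # rev "<hex revision>".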

                match = regex.match(version)

                if not match:
                    raise ParameterError("Invalid git url: %s" % version, ud.url)

                groups = match.groupdict()

                uri = URI("git://" + str(groups["url"]))
                uri.params["protocol"] = str(groups["protocol"])
                uri.params["rev"] = str(groups["rev"])
                uri.params["destsuffix"] = destsuffix

                url = str(uri)

            # local tarball sources and local link sources are unsupported
            else:
                raise ParameterError("Unsupported dependency: %s" % name, ud.url)

            ud.deps.append({
                "url": url,
                "localpath": localpath,
                "extrapaths": extrapaths,
                "destsuffix": destsuffix,
            })

        try:
            with open(ud.shrinkwrap_file, "r") as f:
                shrinkwrap = json.load(f)
        except Exception as e:
            raise ParameterError("Invalid shrinkwrap file: %s" % str(e), ud.url)

        foreach_dependencies(shrinkwrap, _resolve_dependency, ud.dev)

        # Avoid conflicts between the environment data and:
        # - the proxy url revision
        # - the proxy url checksum
        data = bb.data.createCopy(d)
        data.delVar("SRCREV")
        data.delVarFlags("SRC_URI")

        # This fetcher resolves multiple URIs from a shrinkwrap file and then
        # forwards them to a proxy fetcher. The donestamp file, the lockfile
        # and the checksums are managed by the proxy fetcher.
        ud.proxy = Fetch([dep["url"] for dep in ud.deps], data)
        ud.needdonestamp = False

    @staticmethod
    def _foreach_proxy_method(ud, handle):
        returns = []
        for proxy_url in ud.proxy.urls:
            proxy_ud = ud.proxy.ud[proxy_url]
            proxy_d = ud.proxy.d
            proxy_ud.setup_localpath(proxy_d)
            returns.append(handle(proxy_ud.method, proxy_ud, proxy_d))
        return returns

    def verify_donestamp(self, ud, d):
        """Verify the donestamp file"""
        def _handle(m, ud, d):
            return m.verify_donestamp(ud, d)
        return all(self._foreach_proxy_method(ud, _handle))

    def update_donestamp(self, ud, d):
        """Update the donestamp file"""
        def _handle(m, ud, d):
            m.update_donestamp(ud, d)
        self._foreach_proxy_method(ud, _handle)

    def need_update(self, ud, d):
        """Force a fetch, even if localpath exists?"""
        def _handle(m, ud, d):
            return m.need_update(ud, d)
        return all(self._foreach_proxy_method(ud, _handle))

    def try_mirrors(self, fetch, ud, d, mirrors):
        """Try to use a mirror"""
        def _handle(m, ud, d):
            return m.try_mirrors(fetch, ud, d, mirrors)
        return all(self._foreach_proxy_method(ud, _handle))

    def download(self, ud, d):
        """Fetch url"""
        ud.proxy.download()

    def unpack(self, ud, rootdir, d):
        """Unpack the downloaded dependencies"""
        destdir = d.getVar("S")
        destsuffix = ud.parm.get("destsuffix")
        if destsuffix:
            destdir = os.path.join(rootdir, destsuffix)

        bb.utils.mkdirhier(destdir)
        bb.utils.copyfile(ud.shrinkwrap_file,
                          os.path.join(destdir, "npm-shrinkwrap.json"))

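        # Dependencies that were resolved to a proxy URL only (e.g. git
        # sources) are unpacked by the proxy fetcher; dependencies downloaded
        # as local archives (registry and http tarballs) are unpacked with
        # npm_unpack below.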
        auto = [dep["url"] for dep in ud.deps if not dep["localpath"]]
        manual = [dep for dep in ud.deps if dep["localpath"]]

        if auto:
            ud.proxy.unpack(destdir, auto)

        for dep in manual:
            depdestdir = os.path.join(destdir, dep["destsuffix"])
            npm_unpack(dep["localpath"], depdestdir, d)

    def clean(self, ud, d):
        """Clean any existing full or partial download"""
        ud.proxy.clean()

        # Clean extra files
        for dep in ud.deps:
            for path in dep["extrapaths"]:
                bb.utils.remove(path)

    def done(self, ud, d):
        """Is the download done?"""
        def _handle(m, ud, d):
            return m.done(ud, d)
        return all(self._foreach_proxy_method(ud, _handle))