author     Jean-Marie LEMETAYER <jean-marie.lemetayer@savoirfairelinux.com>    2020-01-24 18:08:11 +0100
committer  Richard Purdie <richard.purdie@linuxfoundation.org>                 2020-01-27 16:48:10 +0000
commit     d842e9e738e14fc54c21434755033d6eb6b6f3f2 (patch)
tree       4d65e83f4dc962b38c2d7eb92959de5419a4be24 /bitbake
parent     b9ce68a5d7beca817d0370e89b178dbc9ff6ccbc (diff)
download   poky-d842e9e738e14fc54c21434755033d6eb6b6f3f2.tar.gz
bitbake: fetch2/npm: refactor the npm fetcher
This commit refactors the npm fetcher to improve some points and fix others:

 - The big change is that the fetcher now fetches only the package source
   and no longer the dependencies. The npm fetcher thus acts like the other
   fetchers, e.g. git and wget. The dependencies will be handled later.

 - The fetcher only resolves the URL of the package using 'npm view' and
   then forwards it to a proxy fetcher.

 - This commit also fixes a lot of issues with package names (exotic
   characters, scoped packages) which were badly handled.

 - The validation files - lockdown.json and npm-shrinkwrap.json - are no
   longer used by the fetcher. Instead, the downloaded tarball is verified
   with the 'integrity' and 'shasum' provided in the 'npm view' of the
   package [1][2].

1: https://docs.npmjs.com/files/package-lock.json#integrity
2: https://www.w3.org/TR/SRI

(Bitbake rev: 0f451cdc43130d503ada53ed1b4fc5a24943f6ef)

Signed-off-by: Jean-Marie LEMETAYER <jean-marie.lemetayer@savoirfairelinux.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
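Editor's note: for illustration only (not part of the patch), a minimal standalone sketch of how a subresource integrity string such as "sha512-<base64>" maps to the checksum name and hex value that the proxy fetcher checks, mirroring the npm_integrity() helper introduced by this change; the sample data below is made up for the demonstration:

    import base64
    import hashlib

    def sri_to_checksum(integrity):
        # Split "sha512-<base64 digest>" into the algorithm and the digest,
        # then convert the base64 digest to the hex form used as a BitBake
        # checksum parameter (e.g. sha512sum), as npm_integrity() does.
        algo, value = integrity.split("-", maxsplit=1)
        return "%ssum" % algo, base64.b64decode(value).hex()

    # Build a sample SRI value from arbitrary data, then convert it back.
    digest = hashlib.sha512(b"example tarball contents").digest()
    sri = "sha512-" + base64.b64encode(digest).decode()
    print(sri_to_checksum(sri))  # ('sha512sum', '<hex form of the digest>')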
Diffstat (limited to 'bitbake')
-rw-r--r--    bitbake/lib/bb/fetch2/npm.py    538
1 file changed, 267 insertions(+), 271 deletions(-)
diff --git a/bitbake/lib/bb/fetch2/npm.py b/bitbake/lib/bb/fetch2/npm.py
index be21399e59..47898509ff 100644
--- a/bitbake/lib/bb/fetch2/npm.py
+++ b/bitbake/lib/bb/fetch2/npm.py
@@ -1,300 +1,296 @@
+# Copyright (C) 2020 Savoir-Faire Linux
 #
 # SPDX-License-Identifier: GPL-2.0-only
 #
 """
-BitBake 'Fetch' NPM implementation
+BitBake 'Fetch' npm implementation
 
-The NPM fetcher is used to retrieve files from the npmjs repository
+npm fetcher support the SRC_URI with format of:
+SRC_URI = "npm://some.registry.url;OptionA=xxx;OptionB=xxx;..."
 
-Usage in the recipe:
+Supported SRC_URI options are:
 
-    SRC_URI = "npm://registry.npmjs.org/;name=${PN};version=${PV}"
-    Suported SRC_URI options are:
+- package
+    The npm package name. This is a mandatory parameter.
 
-    - name
-    - version
+- version
+    The npm package version. This is a mandatory parameter.
 
-    npm://registry.npmjs.org/${PN}/-/${PN}-${PV}.tgz would become npm://registry.npmjs.org;name=${PN};version=${PV}
-    The fetcher all triggers off the existence of ud.localpath. If that exists and has the ".done" stamp, its assumed the fetch is good/done
+- downloadfilename
+    Specifies the filename used when storing the downloaded file.
 
+- destsuffix
+    Specifies the directory to use to unpack the package (default: npm).
 """
 
-import os
-import urllib.request, urllib.parse, urllib.error
+import base64
 import json
-import subprocess
-import signal
+import os
+import re
+import shlex
+import tempfile
 import bb
-from bb.fetch2 import FetchMethod
+from bb.fetch2 import Fetch
 from bb.fetch2 import FetchError
-from bb.fetch2 import ChecksumError
-from bb.fetch2 import runfetchcmd
-from bb.fetch2 import logger
-from bb.fetch2 import UnpackError
-from bb.fetch2 import ParameterError
-
-def subprocess_setup():
-    # Python installs a SIGPIPE handler by default. This is usually not what
-    # non-Python subprocesses expect.
-    # SIGPIPE errors are known issues with gzip/bash
-    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import MissingParameterError
+from bb.fetch2 import ParameterError
+from bb.fetch2 import URI
+from bb.fetch2 import check_network_access
+from bb.fetch2 import runfetchcmd
+from bb.utils import is_semver
+
+def npm_package(package):
+    """Convert the npm package name to remove unsupported character"""
+    # Scoped package names (with the @) use the same naming convention
+    # as the 'npm pack' command.
+    if package.startswith("@"):
+        return re.sub("/", "-", package[1:])
+    return package
+
+def npm_filename(package, version):
+    """Get the filename of a npm package"""
+    return npm_package(package) + "-" + version + ".tgz"
+
+def npm_localfile(package, version):
+    """Get the local filename of a npm package"""
+    return os.path.join("npm2", npm_filename(package, version))
+
+def npm_integrity(integrity):
+    """
+    Get the checksum name and expected value from the subresource integrity
+    https://www.w3.org/TR/SRI/
+    """
+    algo, value = integrity.split("-", maxsplit=1)
+    return "%ssum" % algo, base64.b64decode(value).hex()
+
+def npm_unpack(tarball, destdir, d):
+    """Unpack a npm tarball"""
+    bb.utils.mkdirhier(destdir)
+    cmd = "tar --extract --gzip --file=%s" % shlex.quote(tarball)
+    cmd += " --no-same-owner"
+    cmd += " --strip-components=1"
+    runfetchcmd(cmd, d, workdir=destdir)
+
+class NpmEnvironment(object):
+    """
+    Using a npm config file seems more reliable than using cli arguments.
+    This class allows to create a controlled environment for npm commands.
+    """
+    def __init__(self, d, configs=None):
+        self.d = d
+        self.configs = configs
+
+    def run(self, cmd, args=None, configs=None, workdir=None):
+        """Run npm command in a controlled environment"""
+        with tempfile.TemporaryDirectory() as tmpdir:
+            d = bb.data.createCopy(self.d)
+            d.setVar("HOME", tmpdir)
+
+            cfgfile = os.path.join(tmpdir, "npmrc")
+
+            if not workdir:
+                workdir = tmpdir
+
+            def _run(cmd):
+                cmd = "NPM_CONFIG_USERCONFIG=%s " % cfgfile + cmd
+                cmd = "NPM_CONFIG_GLOBALCONFIG=%s " % cfgfile + cmd
+                return runfetchcmd(cmd, d, workdir=workdir)
+
+            if self.configs:
+                for key, value in self.configs:
+                    _run("npm config set %s %s" % (key, shlex.quote(value)))
+
+            if configs:
+                for key, value in configs:
+                    _run("npm config set %s %s" % (key, shlex.quote(value)))
+
+            if args:
+                for key, value in args:
+                    cmd += " --%s=%s" % (key, shlex.quote(value))
+
+            return _run(cmd)
 
 class Npm(FetchMethod):
-
-    """Class to fetch urls via 'npm'"""
-    def init(self, d):
-        pass
+    """Class to fetch a package from a npm registry"""
 
     def supports(self, ud, d):
-        """
-        Check to see if a given url can be fetched with npm
-        """
-        return ud.type in ['npm']
+        """Check if a given url can be fetched with npm"""
+        return ud.type in ["npm"]
+
+    def urldata_init(self, ud, d):
+        """Init npm specific variables within url data"""
+        ud.package = None
+        ud.version = None
+        ud.registry = None
 
-    def debug(self, msg):
-        logger.debug(1, "NpmFetch: %s", msg)
+        # Get the 'package' parameter
+        if "package" in ud.parm:
+            ud.package = ud.parm.get("package")
 
-    def clean(self, ud, d):
-        logger.debug(2, "Calling cleanup %s" % ud.pkgname)
-        bb.utils.remove(ud.localpath, False)
-        bb.utils.remove(ud.pkgdatadir, True)
-        bb.utils.remove(ud.fullmirror, False)
+        if not ud.package:
+            raise MissingParameterError("Parameter 'package' required", ud.url)
+
+        # Get the 'version' parameter
+        if "version" in ud.parm:
+            ud.version = ud.parm.get("version")
 
-    def urldata_init(self, ud, d):
-        """
-        init NPM specific variable within url data
-        """
-        if 'downloadfilename' in ud.parm:
-            ud.basename = ud.parm['downloadfilename']
-        else:
-            ud.basename = os.path.basename(ud.path)
-
-        # can't call it ud.name otherwise fetcher base class will start doing sha1stuff
-        # TODO: find a way to get an sha1/sha256 manifest of pkg & all deps
-        ud.pkgname = ud.parm.get("name", None)
-        if not ud.pkgname:
-            raise ParameterError("NPM fetcher requires a name parameter", ud.url)
-        ud.version = ud.parm.get("version", None)
         if not ud.version:
-            raise ParameterError("NPM fetcher requires a version parameter", ud.url)
-        ud.bbnpmmanifest = "%s-%s.deps.json" % (ud.pkgname, ud.version)
-        ud.bbnpmmanifest = ud.bbnpmmanifest.replace('/', '-')
-        ud.registry = "http://%s" % (ud.url.replace('npm://', '', 1).split(';'))[0]
-        prefixdir = "npm/%s" % ud.pkgname
-        ud.pkgdatadir = d.expand("${DL_DIR}/%s" % prefixdir)
-        if not os.path.exists(ud.pkgdatadir):
-            bb.utils.mkdirhier(ud.pkgdatadir)
-        ud.localpath = d.expand("${DL_DIR}/npm/%s" % ud.bbnpmmanifest)
-
-        self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -O -t 2 -T 30 -nv --passive-ftp --no-check-certificate "
-        ud.prefixdir = prefixdir
-
-        ud.write_tarballs = ((d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0") != "0")
-        mirrortarball = 'npm_%s-%s.tar.xz' % (ud.pkgname, ud.version)
-        mirrortarball = mirrortarball.replace('/', '-')
-        ud.fullmirror = os.path.join(d.getVar("DL_DIR"), mirrortarball)
-        ud.mirrortarballs = [mirrortarball]
+            raise MissingParameterError("Parameter 'version' required", ud.url)
 
-    def need_update(self, ud, d):
-        if os.path.exists(ud.localpath):
-            return False
-        return True
-
-    def _runpack(self, ud, d, pkgfullname: str, quiet=False) -> str:
-        """
-        Runs npm pack on a full package name.
-        Returns the filename of the downloaded package
-        """
-        bb.fetch2.check_network_access(d, pkgfullname, ud.registry)
-        dldir = d.getVar("DL_DIR")
-        dldir = os.path.join(dldir, ud.prefixdir)
-
-        command = "npm pack {} --registry {}".format(pkgfullname, ud.registry)
-        logger.debug(2, "Fetching {} using command '{}' in {}".format(pkgfullname, command, dldir))
-        filename = runfetchcmd(command, d, quiet, workdir=dldir)
-        return filename.rstrip()
-
-    def _unpackdep(self, ud, pkg, data, destdir, dldir, d):
-        file = data[pkg]['tgz']
-        logger.debug(2, "file to extract is %s" % file)
-        if file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
-            cmd = 'tar xz --strip 1 --no-same-owner --warning=no-unknown-keyword -f %s/%s' % (dldir, file)
-        else:
-            bb.fatal("NPM package %s downloaded not a tarball!" % file)
-
-        # Change to subdir before executing command
-        if not os.path.exists(destdir):
-            os.makedirs(destdir)
-        path = d.getVar('PATH')
-        if path:
-            cmd = "PATH=\"%s\" %s" % (path, cmd)
-        bb.note("Unpacking %s to %s/" % (file, destdir))
-        ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True, cwd=destdir)
-
-        if ret != 0:
-            raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), ud.url)
-
-        if 'deps' not in data[pkg]:
-            return
-        for dep in data[pkg]['deps']:
-            self._unpackdep(ud, dep, data[pkg]['deps'], "%s/node_modules/%s" % (destdir, dep), dldir, d)
-
-
-    def unpack(self, ud, destdir, d):
-        dldir = d.getVar("DL_DIR")
-        with open("%s/npm/%s" % (dldir, ud.bbnpmmanifest)) as datafile:
-            workobj = json.load(datafile)
-        dldir = "%s/%s" % (os.path.dirname(ud.localpath), ud.pkgname)
-
-        if 'subdir' in ud.parm:
-            unpackdir = '%s/%s' % (destdir, ud.parm.get('subdir'))
+        if not is_semver(ud.version) and not ud.version == "latest":
+            raise ParameterError("Invalid 'version' parameter", ud.url)
+
+        # Extract the 'registry' part of the url
+        ud.registry = re.sub(r"^npm://", "http://", ud.url.split(";")[0])
+
+        # Using the 'downloadfilename' parameter as local filename
+        # or the npm package name.
+        if "downloadfilename" in ud.parm:
+            ud.localfile = d.expand(ud.parm["downloadfilename"])
         else:
-            unpackdir = '%s/npmpkg' % destdir
+            ud.localfile = npm_localfile(ud.package, ud.version)
 
-        self._unpackdep(ud, ud.pkgname, workobj, unpackdir, dldir, d)
-
-    def _parse_view(self, output):
-        '''
-        Parse the output of npm view --json; the last JSON result
-        is assumed to be the one that we're interested in.
-        '''
-        pdata = json.loads(output);
-        try:
-            return pdata[-1]
-        except:
-            return pdata
-
-    def _getdependencies(self, pkg, data, version, d, ud, optional=False, fetchedlist=None):
-        if fetchedlist is None:
-            fetchedlist = []
-        pkgfullname = pkg
-        if version != '*' and not '/' in version:
-            pkgfullname += "@'%s'" % version
-        if pkgfullname in fetchedlist:
-            return
-
-        logger.debug(2, "Calling getdeps on %s" % pkg)
-        fetchcmd = "npm view %s --json --registry %s" % (pkgfullname, ud.registry)
-        output = runfetchcmd(fetchcmd, d, True)
-        pdata = self._parse_view(output)
-        if not pdata:
-            raise FetchError("The command '%s' returned no output" % fetchcmd)
-        if optional:
-            pkg_os = pdata.get('os', None)
-            if pkg_os:
-                if not isinstance(pkg_os, list):
-                    pkg_os = [pkg_os]
-                blacklist = False
-                for item in pkg_os:
-                    if item.startswith('!'):
-                        blacklist = True
-                        break
-                if (not blacklist and 'linux' not in pkg_os) or '!linux' in pkg_os:
-                    logger.debug(2, "Skipping %s since it's incompatible with Linux" % pkg)
-                    return
-        filename = self._runpack(ud, d, pkgfullname)
-        data[pkg] = {}
-        data[pkg]['tgz'] = filename
-        fetchedlist.append(pkgfullname)
-
-        dependencies = pdata.get('dependencies', {})
-        optionalDependencies = pdata.get('optionalDependencies', {})
-        dependencies.update(optionalDependencies)
-        depsfound = {}
-        optdepsfound = {}
-        data[pkg]['deps'] = {}
-        for dep in dependencies:
-            if dep in optionalDependencies:
-                optdepsfound[dep] = dependencies[dep]
+        # Get the base 'npm' command
+        ud.basecmd = d.getVar("FETCHCMD_npm") or "npm"
+
+        # This fetcher resolves a URI from a npm package name and version and
+        # then forwards it to a proxy fetcher. A resolve file containing the
+        # resolved URI is created to avoid unwanted network access (if the file
+        # already exists). The management of the donestamp file, the lockfile
+        # and the checksums are forwarded to the proxy fetcher.
+        ud.proxy = None
+        ud.needdonestamp = False
+        ud.resolvefile = self.localpath(ud, d) + ".resolved"
+
+    def _resolve_proxy_url(self, ud, d):
+        def _npm_view():
+            configs = []
+            configs.append(("json", "true"))
+            configs.append(("registry", ud.registry))
+            pkgver = shlex.quote(ud.package + "@" + ud.version)
+            cmd = ud.basecmd + " view %s" % pkgver
+            env = NpmEnvironment(d)
+            check_network_access(d, cmd, ud.registry)
+            view_string = env.run(cmd, configs=configs)
+
+            if not view_string:
+                raise FetchError("Unavailable package %s" % pkgver, ud.url)
+
+            try:
+                view = json.loads(view_string)
+
+                error = view.get("error")
+                if error is not None:
+                    raise FetchError(error.get("summary"), ud.url)
+
+                if ud.version == "latest":
+                    bb.warn("The npm package %s is using the latest " \
+                            "version available. This could lead to " \
+                            "non-reproducible builds." % pkgver)
+                elif ud.version != view.get("version"):
+                    raise ParameterError("Invalid 'version' parameter", ud.url)
+
+                return view
+
+            except Exception as e:
+                raise FetchError("Invalid view from npm: %s" % str(e), ud.url)
+
+        def _get_url(view):
+            tarball_url = view.get("dist", {}).get("tarball")
+
+            if tarball_url is None:
+                raise FetchError("Invalid 'dist.tarball' in view", ud.url)
+
+            uri = URI(tarball_url)
+            uri.params["downloadfilename"] = ud.localfile
+
+            integrity = view.get("dist", {}).get("integrity")
+            shasum = view.get("dist", {}).get("shasum")
+
+            if integrity is not None:
+                checksum_name, checksum_expected = npm_integrity(integrity)
+                uri.params[checksum_name] = checksum_expected
+            elif shasum is not None:
+                uri.params["sha1sum"] = shasum
             else:
-                depsfound[dep] = dependencies[dep]
-        for dep, version in optdepsfound.items():
-            self._getdependencies(dep, data[pkg]['deps'], version, d, ud, optional=True, fetchedlist=fetchedlist)
-        for dep, version in depsfound.items():
-            self._getdependencies(dep, data[pkg]['deps'], version, d, ud, fetchedlist=fetchedlist)
-
-    def _getshrinkeddependencies(self, pkg, data, version, d, ud, lockdown, manifest, toplevel=True):
-        logger.debug(2, "NPM shrinkwrap file is %s" % data)
-        if toplevel:
-            name = data.get('name', None)
-            if name and name != pkg:
-                for obj in data.get('dependencies', []):
-                    if obj == pkg:
-                        self._getshrinkeddependencies(obj, data['dependencies'][obj], data['dependencies'][obj]['version'], d, ud, lockdown, manifest, False)
-                        return
-
-        pkgnameWithVersion = "{}@{}".format(pkg, version)
-        logger.debug(2, "Get dependencies for {}".format(pkgnameWithVersion))
-        filename = self._runpack(ud, d, pkgnameWithVersion)
-        manifest[pkg] = {}
-        manifest[pkg]['tgz'] = filename
-        manifest[pkg]['deps'] = {}
-
-        if pkg in lockdown:
-            sha1_expected = lockdown[pkg][version]
-            sha1_data = bb.utils.sha1_file("npm/%s/%s" % (ud.pkgname, manifest[pkg]['tgz']))
-            if sha1_expected != sha1_data:
-                msg = "\nFile: '%s' has %s checksum %s when %s was expected" % (manifest[pkg]['tgz'], 'sha1', sha1_data, sha1_expected)
-                raise ChecksumError('Checksum mismatch!%s' % msg)
-        else:
-            logger.debug(2, "No lockdown data for %s@%s" % (pkg, version))
+                raise FetchError("Invalid 'dist.integrity' in view", ud.url)
 
-        if 'dependencies' in data:
-            for obj in data['dependencies']:
-                logger.debug(2, "Found dep is %s" % str(obj))
-                self._getshrinkeddependencies(obj, data['dependencies'][obj], data['dependencies'][obj]['version'], d, ud, lockdown, manifest[pkg]['deps'], False)
+            return str(uri)
+
+        url = _get_url(_npm_view())
+
+        bb.utils.mkdirhier(os.path.dirname(ud.resolvefile))
+        with open(ud.resolvefile, "w") as f:
+            f.write(url)
+
+    def _setup_proxy(self, ud, d):
+        if ud.proxy is None:
+            if not os.path.exists(ud.resolvefile):
+                self._resolve_proxy_url(ud, d)
+
+            with open(ud.resolvefile, "r") as f:
+                url = f.read()
+
+            # Avoid conflicts between the environment data and:
+            # - the proxy url checksum
+            data = bb.data.createCopy(d)
+            data.delVarFlags("SRC_URI")
+            ud.proxy = Fetch([url], data)
+
+    def _get_proxy_method(self, ud, d):
+        self._setup_proxy(ud, d)
+        proxy_url = ud.proxy.urls[0]
+        proxy_ud = ud.proxy.ud[proxy_url]
+        proxy_d = ud.proxy.d
+        proxy_ud.setup_localpath(proxy_d)
+        return proxy_ud.method, proxy_ud, proxy_d
+
+    def verify_donestamp(self, ud, d):
+        """Verify the donestamp file"""
+        proxy_m, proxy_ud, proxy_d = self._get_proxy_method(ud, d)
+        return proxy_m.verify_donestamp(proxy_ud, proxy_d)
+
+    def update_donestamp(self, ud, d):
+        """Update the donestamp file"""
+        proxy_m, proxy_ud, proxy_d = self._get_proxy_method(ud, d)
+        proxy_m.update_donestamp(proxy_ud, proxy_d)
+
+    def need_update(self, ud, d):
+        """Force a fetch, even if localpath exists ?"""
+        if not os.path.exists(ud.resolvefile):
+            return True
+        if ud.version == "latest":
+            return True
+        proxy_m, proxy_ud, proxy_d = self._get_proxy_method(ud, d)
+        return proxy_m.need_update(proxy_ud, proxy_d)
+
+    def try_mirrors(self, fetch, ud, d, mirrors):
+        """Try to use a mirror"""
+        proxy_m, proxy_ud, proxy_d = self._get_proxy_method(ud, d)
+        return proxy_m.try_mirrors(fetch, proxy_ud, proxy_d, mirrors)
 
     def download(self, ud, d):
         """Fetch url"""
-        jsondepobj = {}
-        shrinkobj = {}
-        lockdown = {}
-
-        if not os.listdir(ud.pkgdatadir) and os.path.exists(ud.fullmirror):
-            dest = d.getVar("DL_DIR")
-            bb.utils.mkdirhier(dest)
-            runfetchcmd("tar -xJf %s" % (ud.fullmirror), d, workdir=dest)
-            return
-
-        if ud.parm.get("noverify", None) != '1':
-            shwrf = d.getVar('NPM_SHRINKWRAP')
-            logger.debug(2, "NPM shrinkwrap file is %s" % shwrf)
-            if shwrf:
-                try:
-                    with open(shwrf) as datafile:
-                        shrinkobj = json.load(datafile)
-                except Exception as e:
-                    raise FetchError('Error loading NPM_SHRINKWRAP file "%s" for %s: %s' % (shwrf, ud.pkgname, str(e)))
-            elif not ud.ignore_checksums:
-                logger.warning('Missing shrinkwrap file in NPM_SHRINKWRAP for %s, this will lead to unreliable builds!' % ud.pkgname)
-            lckdf = d.getVar('NPM_LOCKDOWN')
-            logger.debug(2, "NPM lockdown file is %s" % lckdf)
-            if lckdf:
-                try:
-                    with open(lckdf) as datafile:
-                        lockdown = json.load(datafile)
-                except Exception as e:
-                    raise FetchError('Error loading NPM_LOCKDOWN file "%s" for %s: %s' % (lckdf, ud.pkgname, str(e)))
-            elif not ud.ignore_checksums:
-                logger.warning('Missing lockdown file in NPM_LOCKDOWN for %s, this will lead to unreproducible builds!' % ud.pkgname)
-
-        if ('name' not in shrinkobj):
-            self._getdependencies(ud.pkgname, jsondepobj, ud.version, d, ud)
-        else:
-            self._getshrinkeddependencies(ud.pkgname, shrinkobj, ud.version, d, ud, lockdown, jsondepobj)
-
-        with open(ud.localpath, 'w') as outfile:
-            json.dump(jsondepobj, outfile)
-
-    def build_mirror_data(self, ud, d):
-        # Generate a mirror tarball if needed
-        if ud.write_tarballs and not os.path.exists(ud.fullmirror):
-            # it's possible that this symlink points to read-only filesystem with PREMIRROR
-            if os.path.islink(ud.fullmirror):
-                os.unlink(ud.fullmirror)
-
-            dldir = d.getVar("DL_DIR")
-            logger.info("Creating tarball of npm data")
-            runfetchcmd("tar -cJf %s npm/%s npm/%s" % (ud.fullmirror, ud.bbnpmmanifest, ud.pkgname), d,
-                    workdir=dldir)
-            runfetchcmd("touch %s.done" % (ud.fullmirror), d, workdir=dldir)
+        self._setup_proxy(ud, d)
+        ud.proxy.download()
+
+    def unpack(self, ud, rootdir, d):
+        """Unpack the downloaded archive"""
+        destsuffix = ud.parm.get("destsuffix", "npm")
+        destdir = os.path.join(rootdir, destsuffix)
+        npm_unpack(ud.localpath, destdir, d)
+
+    def clean(self, ud, d):
+        """Clean any existing full or partial download"""
+        if os.path.exists(ud.resolvefile):
+            self._setup_proxy(ud, d)
+            ud.proxy.clean()
+            bb.utils.remove(ud.resolvefile)
+
+    def done(self, ud, d):
+        """Is the download done ?"""
+        if not os.path.exists(ud.resolvefile):
+            return False
+        proxy_m, proxy_ud, proxy_d = self._get_proxy_method(ud, d)
+        return proxy_m.done(proxy_ud, proxy_d)
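Editor's note: as a footnote to the scoped-package handling mentioned in the commit message, the tarball naming introduced by this patch can be illustrated with a small standalone sketch of the npm_package()/npm_filename() helpers (re-implemented here without BitBake so it runs on its own; the package names and versions are arbitrary examples):

    import re

    def npm_package(package):
        # Scoped names ("@scope/name") are flattened the same way 'npm pack'
        # names its output tarballs: drop the "@" and turn "/" into "-".
        if package.startswith("@"):
            return re.sub("/", "-", package[1:])
        return package

    def npm_filename(package, version):
        return npm_package(package) + "-" + version + ".tgz"

    print(npm_filename("@scope/example", "1.0.0"))  # scope-example-1.0.0.tgz
    print(npm_filename("plain-package", "2.3.4"))   # plain-package-2.3.4.tgz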