Diffstat (limited to 'bitbake/lib/bb/fetch2/npm.py')
| -rw-r--r-- | bitbake/lib/bb/fetch2/npm.py | 226 |
1 files changed, 226 insertions, 0 deletions
diff --git a/bitbake/lib/bb/fetch2/npm.py b/bitbake/lib/bb/fetch2/npm.py new file mode 100644 index 0000000000..54cf76df09 --- /dev/null +++ b/bitbake/lib/bb/fetch2/npm.py | |||
| @@ -0,0 +1,226 @@ | |||
| 1 | # ex:ts=4:sw=4:sts=4:et | ||
| 2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 3 | """ | ||
| 4 | BitBake 'Fetch' NPM implementation | ||
| 5 | |||
| 6 | The NPM fetcher is used to retrieve files from the npmjs repository | ||
| 7 | |||
| 8 | Usage in the recipe: | ||
| 9 | |||
| 10 | SRC_URI = "npm://registry.npmjs.org/;name=${PN};version=${PV}" | ||
| 11 | Supported SRC_URI options are: | ||
| 12 | |||
| 13 | - name | ||
| 14 | - version | ||
| 15 | |||
| 16 | npm://registry.npmjs.org/${PN}/-/${PN}-${PV}.tgz would become npm://registry.npmjs.org;name=${PN};version=${PV} | ||
| 17 | The fetcher triggers off the existence of ud.localpath. If that exists and has the ".done" stamp, the fetch is assumed to be good/done. | ||
| 18 | |||
| 19 | """ | ||
| 20 | |||
| 21 | import os | ||
| 22 | import sys | ||
| 23 | import urllib | ||
| 24 | import json | ||
| 25 | import subprocess | ||
| 26 | import signal | ||
| 27 | import bb | ||
| 28 | from bb import data | ||
| 29 | from bb.fetch2 import FetchMethod | ||
| 30 | from bb.fetch2 import FetchError, ParameterError | ||
| 31 | from bb.fetch2 import ChecksumError | ||
| 32 | from bb.fetch2 import runfetchcmd | ||
| 33 | from bb.fetch2 import logger | ||
| 34 | from bb.fetch2 import UnpackError | ||
| 35 | from distutils import spawn | ||
| 36 | |||
| 37 | def subprocess_setup(): | ||
| 38 | # Python installs a SIGPIPE handler by default. This is usually not what | ||
| 39 | # non-Python subprocesses expect. | ||
| 40 | # SIGPIPE errors are known issues with gzip/bash | ||
| 41 | signal.signal(signal.SIGPIPE, signal.SIG_DFL) | ||
| 42 | |||
| 43 | class Npm(FetchMethod): | ||
| 44 | |||
| 45 | """Class to fetch urls via 'npm'""" | ||
| 46 | def init(self, d): | ||
| 47 | pass | ||
| 48 | |||
| 49 | def supports(self, ud, d): | ||
| 50 | """ | ||
| 51 | Check to see if a given url can be fetched with npm | ||
| 52 | """ | ||
| 53 | return ud.type in ['npm'] | ||
| 54 | |||
| 55 | def debug(self, msg): | ||
| 56 | logger.debug(1, "NpmFetch: %s", msg) | ||
| 57 | |||
| 58 | def clean(self, ud, d): | ||
| 59 | logger.debug(2, "Calling cleanup %s" % ud.pkgname) | ||
| 60 | bb.utils.remove(ud.localpath, False) | ||
| 61 | bb.utils.remove(ud.pkgdatadir, True) | ||
| 62 | |||
| 63 | def urldata_init(self, ud, d): | ||
| 64 | """ | ||
| 65 | init NPM-specific variables within url data | ||
| 66 | """ | ||
| 67 | if 'downloadfilename' in ud.parm: | ||
| 68 | ud.basename = ud.parm['downloadfilename'] | ||
| 69 | else: | ||
| 70 | ud.basename = os.path.basename(ud.path) | ||
| 71 | |||
| 72 | # can't call it ud.name otherwise the fetcher base class will start doing sha1 stuff | ||
| 73 | # TODO: find a way to get a sha1/sha256 manifest of the pkg & all its deps | ||
| 74 | ud.pkgname = ud.parm.get("name", None) | ||
| 75 | if not ud.pkgname: | ||
| 76 | raise ParameterError("NPM fetcher requires a name parameter") | ||
| 77 | ud.version = ud.parm.get("version", None) | ||
| 78 | if not ud.version: | ||
| 79 | raise ParameterError("NPM fetcher requires a version parameter") | ||
| 80 | ud.bbnpmmanifest = "%s-%s.deps.json" % (ud.pkgname, ud.version) | ||
| 81 | ud.registry = "http://%s" % ud.basename | ||
| 82 | prefixdir = "npm/%s" % ud.pkgname | ||
| 83 | ud.pkgdatadir = d.expand("${DL_DIR}/%s" % prefixdir) | ||
| 84 | if not os.path.exists(ud.pkgdatadir): | ||
| 85 | bb.utils.mkdirhier(ud.pkgdatadir) | ||
| 86 | ud.localpath = d.expand("${DL_DIR}/npm/%s" % ud.bbnpmmanifest) | ||
| 87 | |||
| 88 | self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -O -t 2 -T 30 -nv --passive-ftp --no-check-certificate " | ||
| 89 | self.basecmd += " --directory-prefix=%s " % prefixdir | ||
| 90 | |||
| 91 | def need_update(self, ud, d): | ||
| 92 | if os.path.exists(ud.localpath): | ||
| 93 | return False | ||
| 94 | return True | ||
| 95 | |||
| 96 | def _runwget(self, ud, d, command, quiet): | ||
| 97 | logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command)) | ||
| 98 | bb.fetch2.check_network_access(d, command) | ||
| 99 | runfetchcmd(command, d, quiet) | ||
| 100 | |||
| 101 | def _unpackdep(self, ud, pkg, data, destdir, dldir, d): | ||
| 102 | file = data[pkg]['tgz'] | ||
| 103 | logger.debug(2, "file to extract is %s" % file) | ||
| 104 | if file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'): | ||
| 105 | cmd = 'tar xz --strip 1 --no-same-owner -f %s/%s' % (dldir, file) | ||
| 106 | else: | ||
| 107 | bb.fatal("NPM package %s downloaded not a tarball!" % file) | ||
| 108 | |||
| 109 | # Change to subdir before executing command | ||
| 110 | save_cwd = os.getcwd() | ||
| 111 | if not os.path.exists(destdir): | ||
| 112 | os.makedirs(destdir) | ||
| 113 | os.chdir(destdir) | ||
| 114 | path = d.getVar('PATH', True) | ||
| 115 | if path: | ||
| 116 | cmd = "PATH=\"%s\" %s" % (path, cmd) | ||
| 117 | bb.note("Unpacking %s to %s/" % (file, os.getcwd())) | ||
| 118 | ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True) | ||
| 119 | os.chdir(save_cwd) | ||
| 120 | |||
| 121 | if ret != 0: | ||
| 122 | raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), ud.url) | ||
| 123 | |||
| 124 | if 'deps' not in data[pkg]: | ||
| 125 | return | ||
| 126 | for dep in data[pkg]['deps']: | ||
| 127 | self._unpackdep(ud, dep, data[pkg]['deps'], "%s/node_modules/%s" % (destdir, dep), dldir, d) | ||
| 128 | |||
| 129 | |||
| 130 | def unpack(self, ud, destdir, d): | ||
| 131 | dldir = d.getVar("DL_DIR", True) | ||
| 132 | depdumpfile = "%s-%s.deps.json" % (ud.pkgname, ud.version) | ||
| 133 | with open("%s/npm/%s" % (dldir, depdumpfile)) as datafile: | ||
| 134 | workobj = json.load(datafile) | ||
| 135 | dldir = "%s/%s" % (os.path.dirname(ud.localpath), ud.pkgname) | ||
| 136 | |||
| 137 | self._unpackdep(ud, ud.pkgname, workobj, "%s/npmpkg" % destdir, dldir, d) | ||
| 138 | |||
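For orientation, here is a sketch (inferred from how _unpackdep() indexes the data, not taken from this commit) of the dependency manifest that download() dumps into ${PN}-${PV}.deps.json and unpack() reloads; every entry records the fetched tarball name plus its own nested deps, and the package names/versions below are made up:

    {
        "foo": {
            "tgz": "foo-1.0.0.tgz",
            "deps": {
                "bar": {
                    "tgz": "bar-0.2.1.tgz",
                    "deps": {}
                }
            }
        }
    }

_unpackdep() walks this structure recursively, extracting each dependency's tarball into node_modules/<dep> beneath its parent.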
| 139 | def _getdependencies(self, pkg, data, version, d, ud): | ||
| 140 | pkgfullname = pkg | ||
| 141 | if version: | ||
| 142 | pkgfullname += "@%s" % version | ||
| 143 | logger.debug(2, "Calling getdeps on %s" % pkg) | ||
| 144 | fetchcmd = "npm view %s dist.tarball --registry %s" % (pkgfullname, ud.registry) | ||
| 145 | output = runfetchcmd(fetchcmd, d, True) | ||
| 146 | # npm may resolve multiple versions | ||
| 147 | outputarray = output.strip().splitlines() | ||
| 148 | # we just take the latest version npm resolved | ||
| 149 | #logger.debug(2, "Output URL is %s - %s - %s" % (ud.basepath, ud.basename, ud.localfile)) | ||
| 150 | outputurl = outputarray[len(outputarray)-1].rstrip() | ||
| 151 | if (len(outputarray) > 1): | ||
| 152 | # remove the preceding version/name from npm output and then the | ||
| 153 | # first and last quotes | ||
| 154 | outputurl = outputurl.split(" ")[1][1:-1] | ||
| 155 | data[pkg] = {} | ||
| 156 | data[pkg]['tgz'] = os.path.basename(outputurl) | ||
| 157 | self._runwget(ud, d, "%s %s" % (self.basecmd, outputurl), False) | ||
| 158 | #fetchcmd = "npm view %s@%s dependencies --json" % (pkg, version) | ||
| 159 | fetchcmd = "npm view %s dependencies --json --registry %s" % (pkgfullname, ud.registry) | ||
| 160 | output = runfetchcmd(fetchcmd, d, True) | ||
| 161 | try: | ||
| 162 | depsfound = json.loads(output) | ||
| 163 | except: | ||
| 164 | # just assume there are no deps to be loaded here | ||
| 165 | return | ||
| 166 | data[pkg]['deps'] = {} | ||
| 167 | for dep, version in depsfound.iteritems(): | ||
| 168 | self._getdependencies(dep, data[pkg]['deps'], version, d, ud) | ||
| 169 | |||
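The multi-version handling above is easiest to see with a concrete value. A small, self-contained sketch follows; the npm output lines are an assumption about what `npm view <pkg> dist.tarball` prints when a version spec matches more than one release (one "name@version 'url'" line per match), chosen to be consistent with the parsing in _getdependencies():

    # Hypothetical output of "npm view bar dist.tarball" for two matching releases
    output = ("bar@0.2.0 'http://registry.npmjs.org/bar/-/bar-0.2.0.tgz'\n"
              "bar@0.2.1 'http://registry.npmjs.org/bar/-/bar-0.2.1.tgz'\n")
    outputarray = output.strip().splitlines()
    outputurl = outputarray[len(outputarray)-1].rstrip()
    if len(outputarray) > 1:
        # drop the leading "name@version" token, then the surrounding quotes
        outputurl = outputurl.split(" ")[1][1:-1]
    print(outputurl)  # http://registry.npmjs.org/bar/-/bar-0.2.1.tgz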
| 170 | def _getshrinkeddependencies(self, pkg, data, version, d, ud, lockdown, manifest): | ||
| 171 | logger.debug(2, "NPM shrinkwrap file is %s" % data) | ||
| 172 | outputurl = "invalid" | ||
| 173 | if ('resolved' not in data): | ||
| 174 | # will be the case for ${PN} | ||
| 175 | fetchcmd = "npm view %s@%s dist.tarball --registry %s" % (pkg, version, ud.registry) | ||
| 176 | logger.debug(2, "Found this matching URL: %s" % str(fetchcmd)) | ||
| 177 | outputurl = runfetchcmd(fetchcmd, d, True) | ||
| 178 | else: | ||
| 179 | outputurl = data['resolved'] | ||
| 180 | self._runwget(ud, d, "%s %s" % (self.basecmd, outputurl), False) | ||
| 181 | manifest[pkg] = {} | ||
| 182 | manifest[pkg]['tgz'] = os.path.basename(outputurl).rstrip() | ||
| 183 | manifest[pkg]['deps'] = {} | ||
| 184 | |||
| 185 | if pkg in lockdown: | ||
| 186 | sha1_expected = lockdown[pkg][version] | ||
| 187 | sha1_data = bb.utils.sha1_file("npm/%s/%s" % (ud.pkgname, manifest[pkg]['tgz'])) | ||
| 188 | if sha1_expected != sha1_data: | ||
| 189 | msg = "\nFile: '%s' has %s checksum %s when %s was expected" % (manifest[pkg]['tgz'], 'sha1', sha1_data, sha1_expected) | ||
| 190 | raise ChecksumError('Checksum mismatch!%s' % msg) | ||
| 191 | else: | ||
| 192 | logger.debug(2, "No lockdown data for %s@%s" % (pkg, version)) | ||
| 193 | |||
| 194 | if 'dependencies' in data: | ||
| 195 | for obj in data['dependencies']: | ||
| 196 | logger.debug(2, "Found dep is %s" % str(obj)) | ||
| 197 | self._getshrinkeddependencies(obj, data['dependencies'][obj], data['dependencies'][obj]['version'], d, ud, lockdown, manifest[pkg]['deps']) | ||
| 198 | |||
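For reference, a hedged sketch of the inputs _getshrinkeddependencies() expects, reconstructed purely from the key accesses above (names, versions and the sha1 value are invented): the shrinkwrap data carries 'version', 'resolved' and nested 'dependencies' entries (the top-level ${PN} entry has no 'resolved', hence the npm view fallback), and the lockdown data maps package name to version to the expected sha1:

    # Hypothetical shrinkwrap fragment
    shrinkobj = {
        "name": "foo",
        "version": "1.0.0",
        "dependencies": {
            "bar": {
                "version": "0.2.1",
                "resolved": "http://registry.npmjs.org/bar/-/bar-0.2.1.tgz"
            }
        }
    }
    # Hypothetical lockdown fragment: package -> version -> expected sha1
    lockdown = {
        "bar": {"0.2.1": "da39a3ee5e6b4b0d3255bfef95601890afd80709"}
    }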
| 199 | def download(self, ud, d): | ||
| 200 | """Fetch url""" | ||
| 201 | jsondepobj = {} | ||
| 202 | shrinkobj = {} | ||
| 203 | lockdown = {} | ||
| 204 | |||
| 205 | shwrf = d.getVar('NPM_SHRINKWRAP', True) | ||
| 206 | logger.debug(2, "NPM shrinkwrap file is %s" % shwrf) | ||
| 207 | try: | ||
| 208 | with open(shwrf) as datafile: | ||
| 209 | shrinkobj = json.load(datafile) | ||
| 210 | except: | ||
| 211 | logger.warn('Missing shrinkwrap file in NPM_SHRINKWRAP for %s, this will lead to unreliable builds!' % ud.pkgname) | ||
| 212 | lckdf = d.getVar('NPM_LOCKDOWN', True) | ||
| 213 | logger.debug(2, "NPM lockdown file is %s" % lckdf) | ||
| 214 | try: | ||
| 215 | with open(lckdf) as datafile: | ||
| 216 | lockdown = json.load(datafile) | ||
| 217 | except: | ||
| 218 | logger.warn('Missing lockdown file in NPM_LOCKDOWN for %s, this will lead to unreproducible builds!' % ud.pkgname) | ||
| 219 | |||
| 220 | if ('name' not in shrinkobj): | ||
| 221 | self._getdependencies(ud.pkgname, jsondepobj, ud.version, d, ud) | ||
| 222 | else: | ||
| 223 | self._getshrinkeddependencies(ud.pkgname, shrinkobj, ud.version, d, ud, lockdown, jsondepobj) | ||
| 224 | |||
| 225 | with open(ud.localpath, 'w') as outfile: | ||
| 226 | json.dump(jsondepobj, outfile) | ||
