Patrick Williams | d8c66bc | 2016-06-20 12:57:21 -0500 | [diff] [blame] | 1 | # ex:ts=4:sw=4:sts=4:et |
| 2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- |
| 3 | """ |
| 4 | BitBake 'Fetch' NPM implementation |
| 5 | |
| 6 | The NPM fetcher is used to retrieve files from the npmjs repository |
| 7 | |
| 8 | Usage in the recipe: |
| 9 | |
| 10 | SRC_URI = "npm://registry.npmjs.org/;name=${PN};version=${PV}" |
Supported SRC_URI options are:
| 12 | |
| 13 | - name |
| 14 | - version |
| 15 | |
npm://registry.npmjs.org/${PN}/-/${PN}-${PV}.tgz would become npm://registry.npmjs.org;name=${PN};version=${PV}
The fetcher triggers off the existence of ud.localpath. If that exists and has the ".done" stamp, it is assumed the fetch is good/done.
| 18 | |
| 19 | """ |
| 20 | |
| 21 | import os |
| 22 | import sys |
| 23 | import urllib |
| 24 | import json |
| 25 | import subprocess |
| 26 | import signal |
| 27 | import bb |
| 28 | from bb import data |
| 29 | from bb.fetch2 import FetchMethod |
| 30 | from bb.fetch2 import FetchError |
| 31 | from bb.fetch2 import ChecksumError |
| 32 | from bb.fetch2 import runfetchcmd |
| 33 | from bb.fetch2 import logger |
| 34 | from bb.fetch2 import UnpackError |
| 35 | from bb.fetch2 import ParameterError |
| 36 | from distutils import spawn |
| 37 | |
def subprocess_setup():
    """Reset SIGPIPE to the default disposition in a child process.

    Python installs a SIGPIPE handler by default, which is usually not
    what non-Python subprocesses expect; SIGPIPE errors are known issues
    with gzip/bash, so restore SIG_DFL before exec'ing them.
    """
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
| 43 | |
class Npm(FetchMethod):

    """Class to fetch urls via 'npm'"""
    def init(self, d):
        pass

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with npm
        """
        return ud.type in ['npm']

    def debug(self, msg):
        logger.debug(1, "NpmFetch: %s", msg)

    def clean(self, ud, d):
        """Remove the dependency manifest, the per-package download dir and
        the mirror tarball for this url."""
        logger.debug(2, "Calling cleanup %s" % ud.pkgname)
        bb.utils.remove(ud.localpath, False)
        bb.utils.remove(ud.pkgdatadir, True)
        bb.utils.remove(ud.fullmirror, False)

    def urldata_init(self, ud, d):
        """
        init NPM specific variable within url data
        """
        if 'downloadfilename' in ud.parm:
            ud.basename = ud.parm['downloadfilename']
        else:
            ud.basename = os.path.basename(ud.path)

        # can't call it ud.name otherwise fetcher base class will start doing sha1stuff
        # TODO: find a way to get an sha1/sha256 manifest of pkg & all deps
        ud.pkgname = ud.parm.get("name", None)
        if not ud.pkgname:
            raise ParameterError("NPM fetcher requires a name parameter", ud.url)
        ud.version = ud.parm.get("version", None)
        if not ud.version:
            raise ParameterError("NPM fetcher requires a version parameter", ud.url)
        ud.bbnpmmanifest = "%s-%s.deps.json" % (ud.pkgname, ud.version)
        # The registry host is everything between 'npm://' and the first ';'
        ud.registry = "http://%s" % (ud.url.replace('npm://', '', 1).split(';'))[0]
        prefixdir = "npm/%s" % ud.pkgname
        ud.pkgdatadir = d.expand("${DL_DIR}/%s" % prefixdir)
        if not os.path.exists(ud.pkgdatadir):
            bb.utils.mkdirhier(ud.pkgdatadir)
        ud.localpath = d.expand("${DL_DIR}/npm/%s" % ud.bbnpmmanifest)

        # NOTE(review): basecmd is stored on the (shared) fetch method
        # object rather than on ud, so parallel fetches of different npm
        # packages would clobber each other's --directory-prefix - confirm
        # whether this path can run concurrently.
        self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -O -t 2 -T 30 -nv --passive-ftp --no-check-certificate "
        self.basecmd += " --directory-prefix=%s " % prefixdir

        # Use the datastore method directly, consistent with the rest of
        # this method, instead of the deprecated bb.data.getVar() function.
        ud.write_tarballs = ((d.getVar("BB_GENERATE_MIRROR_TARBALLS", True) or "0") != "0")
        ud.mirrortarball = 'npm_%s-%s.tar.xz' % (ud.pkgname, ud.version)
        ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)

    def need_update(self, ud, d):
        """The fetch is done once the dependency manifest exists."""
        if os.path.exists(ud.localpath):
            return False
        return True

    def _runwget(self, ud, d, command, quiet):
        """Run a single wget download after the network access check."""
        logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
        bb.fetch2.check_network_access(d, command)
        runfetchcmd(command, d, quiet)

    def _unpackdep(self, ud, pkg, data, destdir, dldir, d):
        """Unpack the tarball recorded for 'pkg' into destdir, then recurse
        into its dependencies, nesting them under destdir/node_modules/."""
        # renamed from 'file' so the builtin is not shadowed
        pkgfile = data[pkg]['tgz']
        logger.debug(2, "file to extract is %s" % pkgfile)
        if pkgfile.endswith(('.tgz', '.tar.gz', '.tar.Z')):
            cmd = 'tar xz --strip 1 --no-same-owner --warning=no-unknown-keyword -f %s/%s' % (dldir, pkgfile)
        else:
            bb.fatal("NPM package %s downloaded not a tarball!" % pkgfile)

        # Change to subdir before executing command
        save_cwd = os.getcwd()
        if not os.path.exists(destdir):
            os.makedirs(destdir)
        os.chdir(destdir)
        path = d.getVar('PATH', True)
        if path:
            cmd = "PATH=\"%s\" %s" % (path, cmd)
        bb.note("Unpacking %s to %s/" % (pkgfile, os.getcwd()))
        try:
            ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
        finally:
            # restore the cwd even if subprocess.call() itself raises
            os.chdir(save_cwd)

        if ret != 0:
            raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), ud.url)

        if 'deps' not in data[pkg]:
            return
        for dep in data[pkg]['deps']:
            self._unpackdep(ud, dep, data[pkg]['deps'], "%s/node_modules/%s" % (destdir, dep), dldir, d)


    def unpack(self, ud, destdir, d):
        """Unpack the whole dependency tree recorded in the .deps.json
        manifest into destdir/npmpkg."""
        dldir = d.getVar("DL_DIR", True)
        depdumpfile = "%s-%s.deps.json" % (ud.pkgname, ud.version)
        with open("%s/npm/%s" % (dldir, depdumpfile)) as datafile:
            workobj = json.load(datafile)
        # tarballs were downloaded under DL_DIR/npm/<pkgname>
        dldir = "%s/%s" % (os.path.dirname(ud.localpath), ud.pkgname)

        self._unpackdep(ud, ud.pkgname, workobj, "%s/npmpkg" % destdir, dldir, d)

    def _parse_view(self, output):
        '''
        Parse the output of npm view --json; the last JSON result
        is assumed to be the one that we're interested in.
        '''
        pdata = None
        datalines = []
        bracelevel = 0
        for line in output.splitlines():
            if bracelevel:
                datalines.append(line)
            elif '{' in line:
                # start of a (new) JSON object; discard anything collected
                # for an earlier object so only the last one is kept
                datalines = []
                datalines.append(line)
            bracelevel = bracelevel + line.count('{') - line.count('}')
        if datalines:
            pdata = json.loads('\n'.join(datalines))
        return pdata

    def _getdependencies(self, pkg, data, version, d, ud, optional=False):
        """Query the registry with 'npm view', download the package tarball
        and recurse into its (optional) dependencies, recording everything
        in the 'data' manifest dict."""
        pkgfullname = pkg
        if version != '*' and '/' not in version:
            pkgfullname += "@'%s'" % version
        logger.debug(2, "Calling getdeps on %s" % pkg)
        fetchcmd = "npm view %s --json --registry %s" % (pkgfullname, ud.registry)
        output = runfetchcmd(fetchcmd, d, True)
        pdata = self._parse_view(output)
        if not pdata:
            raise FetchError("The command '%s' returned no output" % fetchcmd)
        if optional:
            pkg_os = pdata.get('os', None)
            if pkg_os:
                if not isinstance(pkg_os, list):
                    pkg_os = [pkg_os]
                # optional deps that don't support Linux are simply skipped
                if 'linux' not in pkg_os or '!linux' in pkg_os:
                    logger.debug(2, "Skipping %s since it's incompatible with Linux" % pkg)
                    return
        outputurl = pdata['dist']['tarball']
        data[pkg] = {}
        data[pkg]['tgz'] = os.path.basename(outputurl)
        self._runwget(ud, d, "%s %s" % (self.basecmd, outputurl), False)

        dependencies = pdata.get('dependencies', {})
        optionalDependencies = pdata.get('optionalDependencies', {})
        depsfound = {}
        optdepsfound = {}
        data[pkg]['deps'] = {}
        for dep in dependencies:
            if dep in optionalDependencies:
                optdepsfound[dep] = dependencies[dep]
            else:
                depsfound[dep] = dependencies[dep]
        # items() instead of the Python2-only iteritems(); 'depver' instead
        # of reusing (shadowing) the 'version' parameter
        for dep, depver in optdepsfound.items():
            self._getdependencies(dep, data[pkg]['deps'], depver, d, ud, optional=True)
        for dep, depver in depsfound.items():
            self._getdependencies(dep, data[pkg]['deps'], depver, d, ud)

    def _getshrinkeddependencies(self, pkg, data, version, d, ud, lockdown, manifest):
        """Download 'pkg' using the resolved URLs from an npm-shrinkwrap
        tree, verify its sha1 against the lockdown data when available,
        and recurse into its dependencies."""
        logger.debug(2, "NPM shrinkwrap file is %s" % data)
        outputurl = "invalid"
        if ('resolved' not in data) or (not data['resolved'].startswith('http')):
            # will be the case for ${PN}
            fetchcmd = "npm view %s@%s dist.tarball --registry %s" % (pkg, version, ud.registry)
            logger.debug(2, "Found this matching URL: %s" % str(fetchcmd))
            outputurl = runfetchcmd(fetchcmd, d, True)
        else:
            outputurl = data['resolved']
        self._runwget(ud, d, "%s %s" % (self.basecmd, outputurl), False)
        manifest[pkg] = {}
        # rstrip: 'npm view' output may carry a trailing newline
        manifest[pkg]['tgz'] = os.path.basename(outputurl).rstrip()
        manifest[pkg]['deps'] = {}

        if pkg in lockdown:
            sha1_expected = lockdown[pkg][version]
            sha1_data = bb.utils.sha1_file("npm/%s/%s" % (ud.pkgname, manifest[pkg]['tgz']))
            if sha1_expected != sha1_data:
                msg = "\nFile: '%s' has %s checksum %s when %s was expected" % (manifest[pkg]['tgz'], 'sha1', sha1_data, sha1_expected)
                raise ChecksumError('Checksum mismatch!%s' % msg)
        else:
            logger.debug(2, "No lockdown data for %s@%s" % (pkg, version))

        if 'dependencies' in data:
            for obj in data['dependencies']:
                logger.debug(2, "Found dep is %s" % str(obj))
                self._getshrinkeddependencies(obj, data['dependencies'][obj], data['dependencies'][obj]['version'], d, ud, lockdown, manifest[pkg]['deps'])

    def download(self, ud, d):
        """Fetch url"""
        jsondepobj = {}
        shrinkobj = {}
        lockdown = {}

        # If nothing has been fetched yet but a mirror tarball exists,
        # restore the whole package tree from it instead of the network.
        if not os.listdir(ud.pkgdatadir) and os.path.exists(ud.fullmirror):
            dest = d.getVar("DL_DIR", True)
            bb.utils.mkdirhier(dest)
            save_cwd = os.getcwd()
            os.chdir(dest)
            try:
                runfetchcmd("tar -xJf %s" % (ud.fullmirror), d)
            finally:
                # restore the cwd even if the extraction fails
                os.chdir(save_cwd)
            return

        shwrf = d.getVar('NPM_SHRINKWRAP', True)
        logger.debug(2, "NPM shrinkwrap file is %s" % shwrf)
        try:
            with open(shwrf) as datafile:
                shrinkobj = json.load(datafile)
        except Exception:
            # was a bare 'except:'; narrowed so KeyboardInterrupt/SystemExit
            # are no longer swallowed, while an unset variable, a missing
            # file or invalid JSON still only warns
            logger.warn('Missing shrinkwrap file in NPM_SHRINKWRAP for %s, this will lead to unreliable builds!' % ud.pkgname)
        lckdf = d.getVar('NPM_LOCKDOWN', True)
        logger.debug(2, "NPM lockdown file is %s" % lckdf)
        try:
            with open(lckdf) as datafile:
                lockdown = json.load(datafile)
        except Exception:
            # same narrowing as the shrinkwrap load above
            logger.warn('Missing lockdown file in NPM_LOCKDOWN for %s, this will lead to unreproducible builds!' % ud.pkgname)

        if ('name' not in shrinkobj):
            self._getdependencies(ud.pkgname, jsondepobj, ud.version, d, ud)
        else:
            self._getshrinkeddependencies(ud.pkgname, shrinkobj, ud.version, d, ud, lockdown, jsondepobj)

        with open(ud.localpath, 'w') as outfile:
            json.dump(jsondepobj, outfile)

    def build_mirror_data(self, ud, d):
        # Generate a mirror tarball if needed
        if ud.write_tarballs and not os.path.exists(ud.fullmirror):
            # it's possible that this symlink points to read-only filesystem with PREMIRROR
            if os.path.islink(ud.fullmirror):
                os.unlink(ud.fullmirror)

            save_cwd = os.getcwd()
            os.chdir(d.getVar("DL_DIR", True))
            try:
                logger.info("Creating tarball of npm data")
                runfetchcmd("tar -cJf %s npm/%s npm/%s" % (ud.fullmirror, ud.bbnpmmanifest, ud.pkgname), d)
                runfetchcmd("touch %s.done" % (ud.fullmirror), d)
            finally:
                # restore the cwd even if tarball creation fails
                os.chdir(save_cwd)