blob: 36fcbfba15e26263339c289e7ba2c66bf160c5eb [file] [log] [blame]
Andrew Geissler82c905d2020-04-13 13:39:40 -05001# Copyright (C) 2020 Savoir-Faire Linux
2#
3# SPDX-License-Identifier: GPL-2.0-only
4#
5"""
6BitBake 'Fetch' npm shrinkwrap implementation
7
The npmsw fetcher supports SRC_URI entries with the format:
9SRC_URI = "npmsw://some.registry.url;OptionA=xxx;OptionB=xxx;..."
10
11Supported SRC_URI options are:
12
13- dev
14 Set to 1 to also install devDependencies.
15
16- destsuffix
17 Specifies the directory to use to unpack the dependencies (default: ${S}).
18"""
19
20import json
21import os
22import re
23import bb
24from bb.fetch2 import Fetch
25from bb.fetch2 import FetchMethod
26from bb.fetch2 import ParameterError
Andrew Geisslereff27472021-10-29 15:35:00 -050027from bb.fetch2 import runfetchcmd
Andrew Geissler82c905d2020-04-13 13:39:40 -050028from bb.fetch2 import URI
29from bb.fetch2.npm import npm_integrity
30from bb.fetch2.npm import npm_localfile
31from bb.fetch2.npm import npm_unpack
32from bb.utils import is_semver
Andrew Geisslereff27472021-10-29 15:35:00 -050033from bb.utils import lockfile
34from bb.utils import unlockfile
Andrew Geissler82c905d2020-04-13 13:39:40 -050035
def foreach_dependencies(shrinkwrap, callback=None, dev=False):
    """
    Walk every dependency of a shrinkwrap file, depth first, invoking
    *callback* for each one.

    The callback uses the format:
        callback(name, params, deptree)
    with:
        name = the package name (string)
        params = the package parameters (dictionary)
        deptree = the package dependency tree (array of strings)

    Development dependencies are reported only when *dev* is true and
    bundled dependencies are never reported; in both cases their child
    dependencies are still visited.
    """
    def _visit(dependencies, ancestry):
        for pkg_name, pkg_params in dependencies.items():
            chain = [*ancestry, pkg_name]
            # Children are visited before their parent is reported.
            _visit(pkg_params.get("dependencies", {}), chain)
            if callback is None:
                continue
            if pkg_params.get("dev", False) and not dev:
                continue
            if pkg_params.get("bundled", False):
                continue
            callback(pkg_name, pkg_params, chain)

    _visit(shrinkwrap.get("dependencies", {}), [])
58
class NpmShrinkWrap(FetchMethod):
    """Class to fetch all package from a shrinkwrap file"""

    def supports(self, ud, d):
        """Check if a given url can be fetched with npmsw"""
        return ud.type in ["npmsw"]

    def urldata_init(self, ud, d):
        """
        Init npmsw specific variables within url data.

        Parses the referenced shrinkwrap file, resolves every dependency
        into a fetchable URI and builds a proxy Fetch object that performs
        the actual downloads.
        """

        # Get the 'shrinkwrap' parameter: the file path is everything
        # between the "npmsw://" scheme prefix and the first ";" separator.
        ud.shrinkwrap_file = re.sub(r"^npmsw://", "", ud.url.split(";")[0])

        # Get the 'dev' parameter (also fetch devDependencies when set)
        ud.dev = bb.utils.to_boolean(ud.parm.get("dev"), False)

        # Resolve the dependencies
        ud.deps = []

        def _resolve_dependency(name, params, deptree):
            # Resolve one shrinkwrap entry into a dict appended to ud.deps:
            #   url        - URI for the proxy fetcher (None for local files)
            #   localpath  - local file/dir path (None for git sources)
            #   extrapaths - extra files to remove on clean()
            #   destsuffix - unpack dir relative to the root node_modules
            #   unpack     - False when the source is a local directory link
            url = None
            localpath = None
            extrapaths = []
            destsubdirs = [os.path.join("node_modules", dep) for dep in deptree]
            destsuffix = os.path.join(*destsubdirs)
            unpack = True

            integrity = params.get("integrity", None)
            resolved = params.get("resolved", None)
            version = params.get("version", None)

            # Handle registry sources
            if is_semver(version) and integrity:
                # Handle duplicate dependencies without url
                if not resolved:
                    return

                localfile = npm_localfile(name, version)

                uri = URI(resolved)
                uri.params["downloadfilename"] = localfile

                # Forward the shrinkwrap integrity value as a checksum
                # parameter so the proxy fetcher verifies the download.
                checksum_name, checksum_expected = npm_integrity(integrity)
                uri.params[checksum_name] = checksum_expected

                url = str(uri)

                localpath = os.path.join(d.getVar("DL_DIR"), localfile)

                # Create a resolve file to mimic the npm fetcher and allow
                # re-usability of the downloaded file.
                resolvefile = localpath + ".resolved"

                bb.utils.mkdirhier(os.path.dirname(resolvefile))
                with open(resolvefile, "w") as f:
                    f.write(url)

                extrapaths.append(resolvefile)

            # Handle http tarball sources
            elif version.startswith("http") and integrity:
                localfile = npm_localfile(os.path.basename(version))

                uri = URI(version)
                uri.params["downloadfilename"] = localfile

                checksum_name, checksum_expected = npm_integrity(integrity)
                uri.params[checksum_name] = checksum_expected

                url = str(uri)

                localpath = os.path.join(d.getVar("DL_DIR"), localfile)

            # Handle local tarball and link sources
            elif version.startswith("file"):
                # Strip the "file:" prefix; anything that is not a ".tgz"
                # tarball is treated as a directory link and copied as-is.
                localpath = version[5:]
                if not version.endswith(".tgz"):
                    unpack = False

            # Handle git sources
            elif version.startswith(("git", "bitbucket","gist")) or (
                not version.endswith((".tgz", ".tar", ".tar.gz"))
                and not version.startswith((".", "@", "/"))
                and "/" in version
            ):
                # Normalize the npm hosted-git shorthands ("github:",
                # "gist:", "bitbucket:", "gitlab:", bare "owner/repo")
                # to a canonical "git+<protocol>://<url>#<rev>" form.
                if version.startswith("github:"):
                    version = "git+https://github.com/" + version[len("github:"):]
                elif version.startswith("gist:"):
                    version = "git+https://gist.github.com/" + version[len("gist:"):]
                elif version.startswith("bitbucket:"):
                    version = "git+https://bitbucket.org/" + version[len("bitbucket:"):]
                elif version.startswith("gitlab:"):
                    version = "git+https://gitlab.com/" + version[len("gitlab:"):]
                elif not version.startswith(("git+","git:")):
                    version = "git+https://github.com/" + version
                regex = re.compile(r"""
                    ^
                    git\+
                    (?P<protocol>[a-z]+)
                    ://
                    (?P<url>[^#]+)
                    \#
                    (?P<rev>[0-9a-f]+)
                    $
                    """, re.VERBOSE)

                match = regex.match(version)

                if not match:
                    raise ParameterError("Invalid git url: %s" % version, ud.url)

                groups = match.groupdict()

                uri = URI("git://" + str(groups["url"]))
                uri.params["protocol"] = str(groups["protocol"])
                uri.params["rev"] = str(groups["rev"])
                uri.params["destsuffix"] = destsuffix

                url = str(uri)

            else:
                raise ParameterError("Unsupported dependency: %s" % name, ud.url)

            ud.deps.append({
                "url": url,
                "localpath": localpath,
                "extrapaths": extrapaths,
                "destsuffix": destsuffix,
                "unpack": unpack,
            })

        try:
            with open(ud.shrinkwrap_file, "r") as f:
                shrinkwrap = json.load(f)
        except Exception as e:
            raise ParameterError("Invalid shrinkwrap file: %s" % str(e), ud.url)

        foreach_dependencies(shrinkwrap, _resolve_dependency, ud.dev)

        # Avoid conflicts between the environment data and:
        # - the proxy url revision
        # - the proxy url checksum
        data = bb.data.createCopy(d)
        data.delVar("SRCREV")
        data.delVarFlags("SRC_URI")

        # This fetcher resolves multiple URIs from a shrinkwrap file and then
        # forwards it to a proxy fetcher. The management of the donestamp file,
        # the lockfile and the checksums are forwarded to the proxy fetcher.
        ud.proxy = Fetch([dep["url"] for dep in ud.deps if dep["url"]], data)
        ud.needdonestamp = False

    @staticmethod
    def _foreach_proxy_method(ud, handle):
        # Run 'handle(method, urldata, datastore)' for every proxied URI,
        # holding the per-URI lockfile around each call, and collect the
        # return values in order.
        returns = []
        for proxy_url in ud.proxy.urls:
            proxy_ud = ud.proxy.ud[proxy_url]
            proxy_d = ud.proxy.d
            proxy_ud.setup_localpath(proxy_d)
            lf = lockfile(proxy_ud.lockfile)
            returns.append(handle(proxy_ud.method, proxy_ud, proxy_d))
            unlockfile(lf)
        return returns

    def verify_donestamp(self, ud, d):
        """Verify the donestamp file"""
        # Done only when every proxied URI's donestamp verifies.
        def _handle(m, ud, d):
            return m.verify_donestamp(ud, d)
        return all(self._foreach_proxy_method(ud, _handle))

    def update_donestamp(self, ud, d):
        """Update the donestamp file"""
        def _handle(m, ud, d):
            m.update_donestamp(ud, d)
        self._foreach_proxy_method(ud, _handle)

    def need_update(self, ud, d):
        """Force a fetch, even if localpath exists ?"""
        # NOTE(review): requires ALL proxied URIs to need an update before
        # reporting True — confirm this matches the intended semantics, as
        # any() would re-fetch when a single dependency is stale.
        def _handle(m, ud, d):
            return m.need_update(ud, d)
        return all(self._foreach_proxy_method(ud, _handle))

    def try_mirrors(self, fetch, ud, d, mirrors):
        """Try to use a mirror"""
        def _handle(m, ud, d):
            return m.try_mirrors(fetch, ud, d, mirrors)
        return all(self._foreach_proxy_method(ud, _handle))

    def download(self, ud, d):
        """Fetch url"""
        ud.proxy.download()

    def unpack(self, ud, rootdir, d):
        """Unpack the downloaded dependencies"""
        destdir = d.getVar("S")
        destsuffix = ud.parm.get("destsuffix")
        if destsuffix:
            destdir = os.path.join(rootdir, destsuffix)

        # Keep a copy of the shrinkwrap file next to the unpacked modules
        # so later npm steps can consume it.
        bb.utils.mkdirhier(destdir)
        bb.utils.copyfile(ud.shrinkwrap_file,
                          os.path.join(destdir, "npm-shrinkwrap.json"))

        # Dependencies with a localpath (registry/http/local sources) are
        # unpacked manually below; the rest (git sources) are delegated to
        # the proxy fetcher.
        auto = [dep["url"] for dep in ud.deps if not dep["localpath"]]
        manual = [dep for dep in ud.deps if dep["localpath"]]

        if auto:
            ud.proxy.unpack(destdir, auto)

        for dep in manual:
            depdestdir = os.path.join(destdir, dep["destsuffix"])
            if dep["url"]:
                # Downloaded tarball (registry or http source)
                npm_unpack(dep["localpath"], depdestdir, d)
            else:
                # Local file source: unpack the tarball, or copy the linked
                # directory in place (cp -fpPRH preserves attributes and
                # does not dereference symlinks inside the tree).
                depsrcdir = os.path.join(destdir, dep["localpath"])
                if dep["unpack"]:
                    npm_unpack(depsrcdir, depdestdir, d)
                else:
                    bb.utils.mkdirhier(depdestdir)
                    cmd = 'cp -fpPRH "%s/." .' % (depsrcdir)
                    runfetchcmd(cmd, d, workdir=depdestdir)

    def clean(self, ud, d):
        """Clean any existing full or partial download"""
        ud.proxy.clean()

        # Clean extra files (the ".resolved" files created for registry
        # sources in urldata_init)
        for dep in ud.deps:
            for path in dep["extrapaths"]:
                bb.utils.remove(path)

    def done(self, ud, d):
        """Is the download done ?"""
        def _handle(m, ud, d):
            return m.done(ud, d)
        return all(self._foreach_proxy_method(ud, _handle))