blob: cf0201c4903597227ade888022f6b0051608ecee [file] [log] [blame]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001"""
2BitBake 'Fetch' implementations
3
4Classes for obtaining upstream sources for the
5BitBake build tools.
6"""
7
8# Copyright (C) 2003, 2004 Chris Larson
9# Copyright (C) 2012 Intel Corporation
10#
Brad Bishopc342db32019-05-15 21:57:59 -040011# SPDX-License-Identifier: GPL-2.0-only
Patrick Williamsc124f4f2015-09-15 14:41:29 -050012#
13# Based on functions from the base bb module, Copyright 2003 Holger Schurig
14
Patrick Williamsc124f4f2015-09-15 14:41:29 -050015import os, re
16import signal
Patrick Williamsc124f4f2015-09-15 14:41:29 -050017import logging
Patrick Williamsc0f7c042017-02-23 20:41:17 -060018import urllib.request, urllib.parse, urllib.error
19if 'git' not in urllib.parse.uses_netloc:
20 urllib.parse.uses_netloc.append('git')
21import operator
22import collections
23import subprocess
24import pickle
Brad Bishop6e60e8b2018-02-01 10:27:11 -050025import errno
Patrick Williamsc124f4f2015-09-15 14:41:29 -050026import bb.persist_data, bb.utils
27import bb.checksum
Patrick Williamsc124f4f2015-09-15 14:41:29 -050028import bb.process
Brad Bishopd7bf8c12018-02-25 22:55:05 -050029import bb.event
Patrick Williamsc124f4f2015-09-15 14:41:29 -050030
# Version identifier for the fetch2 implementation.
__version__ = "2"
# Process-wide file checksum cache; initialised in fetcher_init() and
# flushed via fetcher_parse_save()/fetcher_parse_done().
_checksum_cache = bb.checksum.FileChecksumCache()

logger = logging.getLogger("BitBake.Fetcher")

# All checksum algorithms verify_checksum() checks against the recipe.
CHECKSUM_LIST = [ "md5", "sha256", "sha1", "sha384", "sha512" ]
# Subset suggested to the user when a recipe is missing checksums.
SHOWN_CHECKSUM_LIST = ["sha256"]
38
class BBFetchException(Exception):
    """Base class all fetcher exceptions inherit from.

    The formatted message is kept on ``self.msg`` so callers can access
    it directly; ``str()`` of the exception returns the same text.
    """
    def __init__(self, message):
        self.msg = message
        super().__init__(message)

    def __str__(self):
        return self.msg
Patrick Williamsc124f4f2015-09-15 14:41:29 -050047
class UntrustedUrl(BBFetchException):
    """Exception raised when encountering a host not listed in BB_ALLOWED_NETWORKS"""
    def __init__(self, url, message=''):
        # Use the caller's text when given, otherwise a standard message
        msg = message or "The URL: '%s' is not trusted and cannot be used" % url
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (url,)
58
class MalformedUrl(BBFetchException):
    """Exception raised when encountering an invalid url"""
    def __init__(self, url, message=''):
        # Fall back to a generic description when no message is supplied
        msg = message or "The URL: '%s' is invalid and cannot be interpreted" % url
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (url,)
Patrick Williamsc124f4f2015-09-15 14:41:29 -050069
class FetchError(BBFetchException):
    """General fetcher exception when something happens incorrectly"""
    def __init__(self, message, url = None):
        # Mention the URL in the message only when one was supplied
        msg = ("Fetcher failure for URL: '%s'. %s" % (url, message)
               if url else "Fetcher failure: %s" % message)
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (message, url)
Patrick Williamsc124f4f2015-09-15 14:41:29 -050080
class ChecksumError(FetchError):
    """Exception when mismatched checksum encountered"""
    def __init__(self, message, url = None, checksum = None):
        # Offending checksum value; consumed e.g. by rename_bad_checksum()
        # in verify_donestamp()/update_stamp() error paths.
        self.checksum = checksum
        FetchError.__init__(self, message, url)
86
class NoChecksumError(FetchError):
    """Exception when no checksum is specified, but BB_STRICT_CHECKSUM is set.

    Raised by verify_checksum() when the fetch method recommends checksums,
    none are present in the recipe and BB_STRICT_CHECKSUM == "1".
    """
89
class UnpackError(BBFetchException):
    """General fetcher exception when something happens incorrectly when unpacking"""
    def __init__(self, message, url):
        self.url = url
        # Prefix the caller's message with the URL being unpacked
        BBFetchException.__init__(
            self, "Unpack failure for URL: '%s'. %s" % (url, message))
        self.args = (message, url)
Patrick Williamsc124f4f2015-09-15 14:41:29 -050097
class NoMethodError(BBFetchException):
    """Exception raised when there is no method to obtain a supplied url or set of urls"""
    def __init__(self, url):
        self.url = url
        BBFetchException.__init__(
            self, "Could not find a fetcher which supports the URL: '%s'" % url)
        self.args = (url,)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500105
class MissingParameterError(BBFetchException):
    """Exception raised when a fetch method is missing a critical parameter in the url"""
    def __init__(self, missing, url):
        self.url = url
        self.missing = missing
        BBFetchException.__init__(
            self, "URL: '%s' is missing the required parameter '%s'" % (url, missing))
        self.args = (missing, url)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500114
class ParameterError(BBFetchException):
    """Exception raised when a url cannot be processed due to invalid parameters."""
    def __init__(self, message, url):
        self.url = url
        BBFetchException.__init__(
            self, "URL: '%s' has invalid parameters. %s" % (url, message))
        self.args = (message, url)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500122
class NetworkAccess(BBFetchException):
    """Exception raised when network access is disabled but it is required."""
    def __init__(self, url, cmd):
        self.url = url
        self.cmd = cmd
        BBFetchException.__init__(
            self,
            "Network access disabled through BB_NO_NETWORK (or set indirectly due to use of BB_FETCH_PREMIRRORONLY) but access requested with command %s (for url %s)" % (cmd, url))
        self.args = (url, cmd)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500131
class NonLocalMethod(Exception):
    """Marker exception carrying no message (raised for non-local fetch methods)."""
    def __init__(self):
        super().__init__()
135
class MissingChecksumEvent(bb.event.Event):
    """Event fired by verify_checksum() when a recipe supplies no checksums.

    The computed checksums are passed as keyword arguments (keys like
    "md5sum", "sha256sum" — see checksum_event in verify_checksum).
    """
    def __init__(self, url, **checksums):
        self.url = url
        self.checksums = checksums
        bb.event.Event.__init__(self)
141
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500142
class URI(object):
    """
    A class representing a generic URI, with methods for
    accessing the URI components, and stringifies to the
    URI.

    It is constructed by calling it with a URI, or setting
    the attributes manually:

     uri = URI("http://example.com/")

     uri = URI()
     uri.scheme = 'http'
     uri.hostname = 'example.com'
     uri.path = '/'

    It has the following attributes:

      * scheme (read/write)
      * userinfo (authentication information) (read/write)
        * username (read/write)
        * password (read/write)

        Note, password is deprecated as of RFC 3986.

      * hostname (read/write)
      * port (read/write)
      * hostport (read only)
        "hostname:port", if both are set, otherwise just "hostname"
      * path (read/write)
      * path_quoted (read/write)
        A URI quoted version of path
      * params (dict) (read/write)
      * query (dict) (read/write)
      * relative (bool) (read only)
        True if this is a "relative URI", (e.g. file:foo.diff)

    It stringifies to the URI itself.

    Some notes about relative URIs: while it's specified that
    a URI beginning with <scheme>:// should either be directly
    followed by a hostname or a /, the old URI handling of the
    fetch2 library did not comform to this. Therefore, this URI
    class has some kludges to make sure that URIs are parsed in
    a way comforming to bitbake's current usage. This URI class
    supports the following:

     file:relative/path.diff (IETF compliant)
     git:relative/path.git (IETF compliant)
     git:///absolute/path.git (IETF compliant)
     file:///absolute/path.diff (IETF compliant)

     file://relative/path.diff (not IETF compliant)

    But it does not support the following:

     file://hostname/absolute/path.diff (would be IETF compliant)

    Note that the last case only applies to a list of
    "whitelisted" schemes (currently only file://), that requires
    its URIs to not have a network location.
    """

    # Schemes that may legitimately appear in relative form ("scheme:path").
    _relative_schemes = ['file', 'git']
    # Schemes that must never carry a network location.
    _netloc_forbidden = ['file']

    def __init__(self, uri=None):
        self.scheme = ''
        self.userinfo = ''
        self.hostname = ''
        self.port = None
        self._path = ''
        self.params = {}
        self.query = {}
        self.relative = False

        # Constructing with no argument yields an empty, fully writable URI.
        if not uri:
            return

        # We hijack the URL parameters, since the way bitbake uses
        # them are not quite RFC compliant.
        # Split off everything after the first ";" so urlparse never sees
        # bitbake's ";key=value" parameter block.
        uri, param_str = (uri.split(";", 1) + [None])[:2]

        urlp = urllib.parse.urlparse(uri)
        self.scheme = urlp.scheme

        reparse = 0

        # Coerce urlparse to make URI scheme use netloc
        # NOTE(review): the condition consults uses_netloc but appends to
        # uses_params — looks inconsistent (and the list grows on every
        # parse of such a scheme); confirm intent before changing.
        if not self.scheme in urllib.parse.uses_netloc:
            urllib.parse.uses_params.append(self.scheme)
            reparse = 1

        # Make urlparse happy(/ier) by converting local resources
        # to RFC compliant URL format. E.g.:
        #   file://foo.diff -> file:foo.diff
        if urlp.scheme in self._netloc_forbidden:
            uri = re.sub("(?<=:)//(?!/)", "", uri, 1)
            reparse = 1

        if reparse:
            urlp = urllib.parse.urlparse(uri)

        # Identify if the URI is relative or not
        if urlp.scheme in self._relative_schemes and \
           re.compile(r"^\w+:(?!//)").match(uri):
            self.relative = True

        if not self.relative:
            self.hostname = urlp.hostname or ''
            self.port = urlp.port

            self.userinfo += urlp.username or ''

            if urlp.password:
                self.userinfo += ':%s' % urlp.password

        # Goes through the path property setter, which also updates
        # self.relative based on whether the path starts with "/".
        self.path = urllib.parse.unquote(urlp.path)

        if param_str:
            self.params = self._param_str_split(param_str, ";")
        if urlp.query:
            self.query = self._param_str_split(urlp.query, "&")

    def __str__(self):
        userinfo = self.userinfo
        if userinfo:
            userinfo += '@'

        return "%s:%s%s%s%s%s%s" % (
            self.scheme,
            '' if self.relative else '//',
            userinfo,
            self.hostport,
            self.path_quoted,
            self._query_str(),
            self._param_str())

    def _param_str(self):
        # ";key=value;..." suffix, or '' when there are no params
        return (
            ''.join([';', self._param_str_join(self.params, ";")])
            if self.params else '')

    def _query_str(self):
        # "?key=value&..." suffix, or '' when there is no query
        return (
            ''.join(['?', self._param_str_join(self.query, "&")])
            if self.query else '')

    def _param_str_split(self, string, elmdelim, kvdelim="="):
        # Parse "k=v<delim>k=v..." into an ordered mapping; values may
        # contain kvdelim because of the maxsplit=1.
        ret = collections.OrderedDict()
        for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim) if x]:
            ret[k] = v
        return ret

    def _param_str_join(self, dict_, elmdelim, kvdelim="="):
        return elmdelim.join([kvdelim.join([k, v]) for k, v in dict_.items()])

    @property
    def hostport(self):
        if not self.port:
            return self.hostname
        return "%s:%d" % (self.hostname, self.port)

    @property
    def path_quoted(self):
        return urllib.parse.quote(self.path)

    @path_quoted.setter
    def path_quoted(self, path):
        self.path = urllib.parse.unquote(path)

    @property
    def path(self):
        return self._path

    @path.setter
    def path(self, path):
        self._path = path

        # An absolute (or empty) path makes the URI non-relative.
        if not path or re.compile("^/").match(path):
            self.relative = False
        else:
            self.relative = True

    @property
    def username(self):
        if self.userinfo:
            return (self.userinfo.split(":", 1))[0]
        return ''

    @username.setter
    def username(self, username):
        # Preserve any existing password when replacing the username.
        password = self.password
        self.userinfo = username
        if password:
            self.userinfo += ":%s" % password

    @property
    def password(self):
        if self.userinfo and ":" in self.userinfo:
            return (self.userinfo.split(":", 1))[1]
        return ''

    @password.setter
    def password(self, password):
        self.userinfo = "%s:%s" % (self.username, password)
349
def decodeurl(url):
    """Decodes an URL into the tokens (scheme, network location, path,
    user, password, parameters).

    Returns:
        A (type, host, path, user, pswd, params) tuple; params is an
        ordered dict built from the ";key=value" URL parameters.

    Raises:
        MalformedUrl: if the URL cannot be parsed, has no location, or a
        parameter lacks an '=' separator.
    """

    m = re.compile('(?P<type>[^:]*)://((?P<user>[^/;]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
    if not m:
        raise MalformedUrl(url)

    type = m.group('type')
    location = m.group('location')
    if not location:
        raise MalformedUrl(url)
    user = m.group('user')
    parm = m.group('parm')

    locidx = location.find('/')
    if locidx != -1 and type.lower() != 'file':
        host = location[:locidx]
        path = location[locidx:]
    elif type.lower() == 'file':
        # file:// URLs carry no host; the whole location is the path
        host = ""
        path = location
    else:
        host = location
        path = "/"
    if user:
        # Split optional "user:password" credentials
        m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
        if m:
            user = m.group('user')
            pswd = m.group('pswd')
    else:
        user = ''
        pswd = ''

    p = collections.OrderedDict()
    if parm:
        for s in parm.split(';'):
            if s:
                if '=' not in s:
                    raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s))
                # Split on the first '=' only: parameter values may contain
                # '=' themselves (an unbounded split raised ValueError on
                # input such as ";someparam=a=b").
                s1, s2 = s.split('=', 1)
                p[s1] = s2

    return type, host, urllib.parse.unquote(path), user, pswd, p
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500395
def encodeurl(decoded):
    """Encodes a URL from tokens (scheme, network location, path,
    user, password, parameters).

    Inverse of decodeurl(); takes a (type, host, path, user, pswd, p)
    tuple and rebuilds the URL string. Raises MissingParameterError if
    the scheme ("type") is missing.
    """

    type, host, path, user, pswd, p = decoded

    if not type:
        raise MissingParameterError('type', "encoded from the data %s" % str(decoded))

    # Assemble the URL from pieces and join once at the end.
    pieces = ['%s://' % type]
    if user and type != "file":
        pieces.append("%s" % user)
        if pswd:
            pieces.append(":%s" % pswd)
        pieces.append("@")
    if host and type != "file":
        pieces.append("%s" % host)
    if path:
        # Standardise path to ensure comparisons work
        while '//' in path:
            path = path.replace("//", "/")
        pieces.append("%s" % urllib.parse.quote(path))
    if p:
        for parm in p:
            pieces.append(";%s=%s" % (parm, p[parm]))

    return "".join(pieces)
423
def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
    """Rewrite ud.url according to a (uri_find, uri_replace) mirror rule.

    Each decoded component of uri_find is treated as a regexp matched
    against the corresponding component of ud.url; matching components
    are substituted from uri_replace (after expanding the keys of
    `replacements` in the replacement strings). Returns the rewritten
    URL, or None when the rule does not apply or would yield ud.url
    unchanged.
    """
    if not ud.url or not uri_find or not uri_replace:
        logger.error("uri_replace: passed an undefined value, not replacing")
        return None
    uri_decoded = list(decodeurl(ud.url))
    uri_find_decoded = list(decodeurl(uri_find))
    uri_replace_decoded = list(decodeurl(uri_replace))
    logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
    # Tuple layout mirrors decodeurl(): [type, host, path, user, pswd, params]
    result_decoded = ['', '', '', '', '', {}]
    for loc, i in enumerate(uri_find_decoded):
        result_decoded[loc] = uri_decoded[loc]
        regexp = i
        if loc == 0 and regexp and not regexp.endswith("$"):
            # Leaving the type unanchored can mean "https" matching "file" can become "files"
            # which is clearly undesirable.
            regexp += "$"
        if loc == 5:
            # Handle URL parameters
            if i:
                # Any specified URL parameters must match
                for k in uri_find_decoded[loc]:
                    if uri_decoded[loc][k] != uri_find_decoded[loc][k]:
                        return None
            # Overwrite any specified replacement parameters
            for k in uri_replace_decoded[loc]:
                for l in replacements:
                    uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l])
                result_decoded[loc][k] = uri_replace_decoded[loc][k]
        elif (re.match(regexp, uri_decoded[loc])):
            if not uri_replace_decoded[loc]:
                result_decoded[loc] = ""
            else:
                for k in replacements:
                    uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k])
                #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc]))
                # Only substitute the first match within the component
                result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc], 1)
            if loc == 2:
                # Handle path manipulations
                basename = None
                if uri_decoded[0] != uri_replace_decoded[0] and mirrortarball:
                    # If the source and destination url types differ, must be a mirrortarball mapping
                    basename = os.path.basename(mirrortarball)
                    # Kill parameters, they make no sense for mirror tarballs
                    uri_decoded[5] = {}
                elif ud.localpath and ud.method.supports_checksum(ud):
                    basename = os.path.basename(ud.localpath)
                if basename and not result_decoded[loc].endswith(basename):
                    result_decoded[loc] = os.path.join(result_decoded[loc], basename)
        else:
            # Component did not match the find-expression: rule not applicable
            return None
    result = encodeurl(result_decoded)
    if result == ud.url:
        return None
    logger.debug2("For url %s returning %s" % (ud.url, result))
    return result
479
# Registered fetcher method objects; fetcher_init() calls m.init(d) on each.
methods = []
# Cache keyed by URL data — presumably URL -> FetchData; verify against callers.
urldata_cache = {}
# Snapshot of SCM head revisions taken the first time fetcher_init() runs.
saved_headrevs = {}
483
def fetcher_init(d):
    """
    Called to initialize the fetchers once the configuration data is known.
    Calls before this must not hit the cache.
    """

    revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
    try:
        # fetcher_init is called multiple times, so make sure we only save the
        # revs the first time it is called.
        if not bb.fetch2.saved_headrevs:
            bb.fetch2.saved_headrevs = dict(revs)
    except:
        # Deliberate best-effort: failure to snapshot the head revisions is
        # treated as non-fatal.
        pass

    # When to drop SCM head revisions controlled by user policy
    srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear"
    if srcrev_policy == "cache":
        logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
    elif srcrev_policy == "clear":
        logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
        revs.clear()
    else:
        raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)

    _checksum_cache.init_cache(d)

    # Give each registered fetch method a chance to initialise itself
    for m in methods:
        if hasattr(m, "init"):
            m.init(d)
514
def fetcher_parse_save():
    """Flush extra checksum-cache entries gathered during parsing
    (delegates to FileChecksumCache.save_extras)."""
    _checksum_cache.save_extras()
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500517
def fetcher_parse_done():
    """Merge the checksum cache once parsing has completed
    (delegates to FileChecksumCache.save_merge)."""
    _checksum_cache.save_merge()
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500520
def fetcher_compare_revisions(d):
    """
    Compare the revisions in the persistent cache with the saved values from
    when bitbake was started and return true if they have changed.
    """

    # dict() takes a point-in-time copy of the persistent store for comparison
    headrevs = dict(bb.persist_data.persist('BB_URI_HEADREVS', d))
    return headrevs != bb.fetch2.saved_headrevs
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500529
def mirror_from_string(data):
    """Parse a whitespace-separated mirror specification into a list of
    (find, replace) pairs.

    Literal "\\n" sequences are treated as separators. Warns (via bb.warn)
    when the entry count is odd; the trailing unpaired entry is dropped.
    """
    tokens = (data or "").replace('\\n',' ').split()
    if len(tokens) % 2 != 0:
        bb.warn('Invalid mirror data %s, should have paired members.' % data)
    # Pair consecutive tokens: [a, b, c, d] -> [(a, b), (c, d)]
    return [(tokens[idx], tokens[idx + 1]) for idx in range(0, len(tokens) - 1, 2)]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500536
def verify_checksum(ud, d, precomputed={}):
    """
    verify the MD5 and SHA256 checksum for downloaded src

    Raises a FetchError if one or both of the SRC_URI checksums do not match
    the downloaded file, or if BB_STRICT_CHECKSUM is set and there are no
    checksums specified.

    Returns a dict of checksums that can be stored in a done stamp file and
    passed in as precomputed parameter in a later call to avoid re-computing
    the checksums from the file. This allows verifying the checksums of the
    file against those in the recipe each time, rather than only after
    downloading. See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571.

    NOTE: the mutable default for `precomputed` is safe here — it is only
    ever read, never mutated.
    """

    if ud.ignore_checksums or not ud.method.supports_checksum(ud):
        return {}

    def compute_checksum_info(checksum_id):
        # Collect (name, actual, expected) for one algorithm; reuse the
        # precomputed value when available to avoid re-hashing the file.
        checksum_name = getattr(ud, "%s_name" % checksum_id)

        if checksum_id in precomputed:
            checksum_data = precomputed[checksum_id]
        else:
            checksum_data = getattr(bb.utils, "%s_file" % checksum_id)(ud.localpath)

        checksum_expected = getattr(ud, "%s_expected" % checksum_id)

        return {
            "id": checksum_id,
            "name": checksum_name,
            "data": checksum_data,
            "expected": checksum_expected
        }

    checksum_infos = []
    for checksum_id in CHECKSUM_LIST:
        checksum_infos.append(compute_checksum_info(checksum_id))

    checksum_dict = {ci["id"] : ci["data"] for ci in checksum_infos}
    checksum_event = {"%ssum" % ci["id"] : ci["data"] for ci in checksum_infos}

    # Build the suggested SRC_URI lines shown to the user; relies on
    # SHOWN_CHECKSUM_LIST being non-empty so checksum_lines gets bound.
    for ci in checksum_infos:
        if ci["id"] in SHOWN_CHECKSUM_LIST:
            checksum_lines = ["SRC_URI[%s] = \"%s\"" % (ci["name"], ci["data"])]

    # If no checksum has been provided
    if ud.method.recommends_checksum(ud) and all(ci["expected"] is None for ci in checksum_infos):
        messages = []
        strict = d.getVar("BB_STRICT_CHECKSUM") or "0"

        # If strict checking enabled and neither sum defined, raise error
        if strict == "1":
            messages.append("No checksum specified for '%s', please add at " \
                            "least one to the recipe:" % ud.localpath)
            messages.extend(checksum_lines)
            logger.error("\n".join(messages))
            raise NoChecksumError("Missing SRC_URI checksum", ud.url)

        bb.event.fire(MissingChecksumEvent(ud.url, **checksum_event), d)

        if strict == "ignore":
            return checksum_dict

        # Log missing sums so user can more easily add them
        messages.append("Missing checksum for '%s', consider adding at " \
                        "least one to the recipe:" % ud.localpath)
        messages.extend(checksum_lines)
        logger.warning("\n".join(messages))

    # We want to alert the user if a checksum is defined in the recipe but
    # it does not match.
    messages = []
    messages.append("Checksum mismatch!")
    bad_checksum = None

    for ci in checksum_infos:
        if ci["expected"] and ci["expected"] != ci["data"]:
            messages.append("File: '%s' has %s checksum %s when %s was " \
                            "expected" % (ud.localpath, ci["id"], ci["data"], ci["expected"]))
            bad_checksum = ci["data"]

    if bad_checksum:
        messages.append("If this change is expected (e.g. you have upgraded " \
                        "to a new version without updating the checksums) " \
                        "then you can use these lines within the recipe:")
        messages.extend(checksum_lines)
        messages.append("Otherwise you should retry the download and/or " \
                        "check with upstream to determine if the file has " \
                        "become corrupted or otherwise unexpectedly modified.")
        raise ChecksumError("\n".join(messages), ud.url, bad_checksum)

    return checksum_dict
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500630
def verify_donestamp(ud, d, origud=None):
    """
    Check whether the done stamp file has the right checksums (if the fetch
    method supports them). If it doesn't, delete the done stamp and force
    a re-download.

    Returns True, if the donestamp exists and is valid, False otherwise. When
    returning False, any existing done stamps are removed.
    """
    if not ud.needdonestamp or (origud and not origud.needdonestamp):
        return True

    if not os.path.exists(ud.localpath):
        # local path does not exist
        if os.path.exists(ud.donestamp):
            # done stamp exists, but the downloaded file does not; the done stamp
            # must be incorrect, re-trigger the download
            bb.utils.remove(ud.donestamp)
        return False

    if (not ud.method.supports_checksum(ud) or
        (origud and not origud.method.supports_checksum(origud))):
        # if done stamp exists and checksums not supported; assume the local
        # file is current
        return os.path.exists(ud.donestamp)

    precomputed_checksums = {}
    # Only re-use the precomputed checksums if the donestamp is newer than the
    # file. Do not rely on the mtime of directories, though. If ud.localpath is
    # a directory, there will probably not be any checksums anyway.
    if os.path.exists(ud.donestamp) and (os.path.isdir(ud.localpath) or
            os.path.getmtime(ud.localpath) < os.path.getmtime(ud.donestamp)):
        try:
            with open(ud.donestamp, "rb") as cachefile:
                pickled = pickle.Unpickler(cachefile)
                precomputed_checksums.update(pickled.load())
        except Exception as e:
            # Avoid the warnings on the upgrade path from emtpy done stamp
            # files to those containing the checksums.
            if not isinstance(e, EOFError):
                # Ignore errors, they aren't fatal
                logger.warning("Couldn't load checksums from donestamp %s: %s "
                               "(msg: %s)" % (ud.donestamp, type(e).__name__,
                                              str(e)))

    try:
        checksums = verify_checksum(ud, d, precomputed_checksums)
        # If the cache file did not have the checksums, compute and store them
        # as an upgrade path from the previous done stamp file format.
        if checksums != precomputed_checksums:
            with open(ud.donestamp, "wb") as cachefile:
                # pickle protocol 2 — presumably kept for compatibility with
                # stamps written by older versions; confirm before changing.
                p = pickle.Pickler(cachefile, 2)
                p.dump(checksums)
        return True
    except ChecksumError as e:
        # Checksums failed to verify, trigger re-download and remove the
        # incorrect stamp file.
        logger.warning("Checksum mismatch for local file %s\n"
                       "Cleaning and trying again." % ud.localpath)
        if os.path.exists(ud.localpath):
            rename_bad_checksum(ud, e.checksum)
        bb.utils.remove(ud.donestamp)
        return False
694
695
def update_stamp(ud, d):
    """
    donestamp is file stamp indicating the whole fetching is done
    this function update the stamp after verifying the checksum
    """
    if not ud.needdonestamp:
        return

    if os.path.exists(ud.donestamp):
        # Touch the done stamp file to show active use of the download
        try:
            os.utime(ud.donestamp, None)
        except:
            # Errors aren't fatal here
            pass
    else:
        try:
            checksums = verify_checksum(ud, d)
            # Store the checksums for later re-verification against the recipe
            with open(ud.donestamp, "wb") as cachefile:
                # pickle protocol 2, matching verify_donestamp()
                p = pickle.Pickler(cachefile, 2)
                p.dump(checksums)
        except ChecksumError as e:
            # Checksums failed to verify, trigger re-download and remove the
            # incorrect stamp file.
            logger.warning("Checksum mismatch for local file %s\n"
                           "Cleaning and trying again." % ud.localpath)
            if os.path.exists(ud.localpath):
                rename_bad_checksum(ud, e.checksum)
            bb.utils.remove(ud.donestamp)
            raise
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500727
def subprocess_setup():
    """Restore default SIGPIPE handling for child processes.

    Python installs a SIGPIPE handler by default, which is usually not
    what non-Python subprocesses expect; SIGPIPE errors are known issues
    with gzip/bash.
    """
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
733
def get_autorev(d):
    """Return the placeholder revision string used for automatic revisions.

    Unless BB_SRCREV_POLICY is "cache", also sets BB_DONT_CACHE so the
    automatically-determined source revision is not cached.
    """
    policy = d.getVar('BB_SRCREV_POLICY')
    if policy != "cache":
        d.setVar('BB_DONT_CACHE', '1')
    return "AUTOINC"
739
def get_srcrev(d, method_name='sortable_revision'):
    """
    Return the revision string, usually for use in the version string (PV) of the current package
    Most packages usually only have one SCM so we just pass on the call.
    In the multi SCM case, we build a value based on SRCREV_FORMAT which must
    have been set.

    The idea here is that we put the string "AUTOINC+" into return value if the revisions are not
    incremental, other code is then responsible for turning that into an increasing value (if needed)

    A method_name can be supplied to retrieve an alternatively formatted revision from a fetcher, if
    that fetcher provides a method with the given name and the same signature as sortable_revision.

    Raises FetchError if SRC_URI contains no SCM url, or if multiple SCMs are
    present but SRCREV_FORMAT is unset.
    """

    # Collect the SRC_URI entries whose fetcher supports source revisions.
    scms = []
    fetcher = Fetch(d.getVar('SRC_URI').split(), d)
    urldata = fetcher.ud
    for u in urldata:
        if urldata[u].method.supports_srcrev():
            scms.append(u)

    if len(scms) == 0:
        raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")

    # Common case: a single SCM with a single name — return its revision
    # directly (truncated to 10 characters, as below).
    if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
        autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].names[0])
        if len(rev) > 10:
            rev = rev[:10]
        if autoinc:
            return "AUTOINC+" + rev
        return rev

    #
    # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
    #
    format = d.getVar('SRCREV_FORMAT')
    if not format:
        raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.\n"\
                         "The SCMs are:\n%s" % '\n'.join(scms))

    # Gather one (possibly truncated) revision per SCM name, remembering
    # whether any of them requested AUTOINC behaviour.
    name_to_rev = {}
    seenautoinc = False
    for scm in scms:
        ud = urldata[scm]
        for name in ud.names:
            autoinc, rev = getattr(ud.method, method_name)(ud, d, name)
            seenautoinc = seenautoinc or autoinc
            if len(rev) > 10:
                rev = rev[:10]
            name_to_rev[name] = rev
    # Replace names by revisions in the SRCREV_FORMAT string. The approach used
    # here can handle names being prefixes of other names and names appearing
    # as substrings in revisions (in which case the name should not be
    # expanded). The '|' regular expression operator tries matches from left to
    # right, so we need to sort the names with the longest ones first.
    names_descending_len = sorted(name_to_rev, key=len, reverse=True)
    name_to_rev_re = "|".join(re.escape(name) for name in names_descending_len)
    format = re.sub(name_to_rev_re, lambda match: name_to_rev[match.group(0)], format)

    if seenautoinc:
        format = "AUTOINC+" + format

    return format
803
def localpath(url, d):
    """Return the local download path for a single url."""
    return bb.fetch2.Fetch([url], d).localpath(url)
807
def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
    """
    Run cmd returning the command output
    Raise an error if interrupted or cmd fails
    Optionally echo command output to stdout
    Optionally remove the files/directories listed in cleanup upon failure

    cmd: shell command string to run
    d: the datastore, used to look up the exported environment variables
    quiet: NOTE(review): not referenced in this function body
    cleanup: paths removed (recursively) if the command fails
    log: file-like object passed through to bb.process.run for logging
    workdir: directory to run the command in (defaults to current dir)
    """

    # Need to export PATH as binary could be in metadata paths
    # rather than host provided
    # Also include some other variables.
    # FIXME: Should really include all export variables?
    exportvars = ['HOME', 'PATH',
                  'HTTP_PROXY', 'http_proxy',
                  'HTTPS_PROXY', 'https_proxy',
                  'FTP_PROXY', 'ftp_proxy',
                  'FTPS_PROXY', 'ftps_proxy',
                  'NO_PROXY', 'no_proxy',
                  'ALL_PROXY', 'all_proxy',
                  'GIT_PROXY_COMMAND',
                  'GIT_SSH',
                  'GIT_SSL_CAINFO',
                  'GIT_SMART_HTTP',
                  'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
                  'SOCKS5_USER', 'SOCKS5_PASSWD',
                  'DBUS_SESSION_BUS_ADDRESS',
                  'P4CONFIG']

    if not cleanup:
        cleanup = []

    # If PATH contains WORKDIR which contains PV-PR which contains SRCPV we
    # can end up in circular recursion here so give the option of breaking it
    # in a data store copy.
    try:
        d.getVar("PV")
        d.getVar("PR")
    except bb.data_smart.ExpansionError:
        # Break the recursion by substituting harmless values in a copy
        # of the datastore; the original d is left untouched.
        d = bb.data.createCopy(d)
        d.setVar("PV", "fetcheravoidrecurse")
        d.setVar("PR", "fetcheravoidrecurse")

    # Prefix the command with 'export VAR="..."' statements; values come
    # from the datastore first, falling back to the original environment.
    origenv = d.getVar("BB_ORIGENV", False)
    for var in exportvars:
        val = d.getVar(var) or (origenv and origenv.getVar(var))
        if val:
            cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)

    # Disable pseudo as it may affect ssh, potentially causing it to hang.
    cmd = 'export PSEUDO_DISABLED=1; ' + cmd

    if workdir:
        logger.debug("Running '%s' in %s" % (cmd, workdir))
    else:
        logger.debug("Running %s", cmd)

    success = False
    error_message = ""

    try:
        (output, errors) = bb.process.run(cmd, log=log, shell=True, stderr=subprocess.PIPE, cwd=workdir)
        success = True
    except bb.process.NotFoundError as e:
        error_message = "Fetch command %s" % (e.command)
    except bb.process.ExecutionError as e:
        # Build a readable failure summary out of whatever output exists.
        if e.stdout:
            output = "output:\n%s\n%s" % (e.stdout, e.stderr)
        elif e.stderr:
            output = "output:\n%s" % e.stderr
        else:
            output = "no output"
        error_message = "Fetch command %s failed with exit code %s, %s" % (e.command, e.exitcode, output)
    except bb.process.CmdError as e:
        error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg)
    if not success:
        # Remove any partial results before reporting the failure.
        for f in cleanup:
            try:
                bb.utils.remove(f, True)
            except OSError:
                pass

        raise FetchError(error_message)

    return output
892
def check_network_access(d, info, url):
    """
    log remote network access, and error if BB_NO_NETWORK is set or the given
    URI is untrusted
    """
    if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")):
        raise NetworkAccess(url, info)
    if not trusted_network(d, url):
        raise UntrustedUrl(url, info)
    logger.debug("Fetcher accessed the network with the command %s" % info)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500904
def build_mirroruris(origud, mirrors, ld):
    """Expand the mirror specification into concrete candidate urls.

    origud: FetchData for the original url
    mirrors: list of (find, replace) mirror pattern pairs
    ld: datastore used to construct FetchData for each candidate

    Returns (uris, uds): parallel lists of candidate urls and their
    FetchData objects, in the order they should be tried.
    """
    uris = []
    uds = []

    # Substitution values available to uri_replace() patterns.
    replacements = {}
    replacements["TYPE"] = origud.type
    replacements["HOST"] = origud.host
    replacements["PATH"] = origud.path
    replacements["BASENAME"] = origud.path.split("/")[-1]
    replacements["MIRRORNAME"] = origud.host.replace(':','.') + origud.path.replace('/', '.').replace('*', '.')

    def adduri(ud, uris, uds, mirrors, tarballs):
        # Recursively accumulate candidate uris for ud, trying each mirror
        # line and, for each new candidate, the mirrors-of-that-mirror.
        for line in mirrors:
            try:
                (find, replace) = line
            except ValueError:
                # Malformed mirror entry; skip it.
                continue

            for tarball in tarballs:
                newuri = uri_replace(ud, find, replace, replacements, ld, tarball)
                if not newuri or newuri in uris or newuri == origud.url:
                    continue

                if not trusted_network(ld, newuri):
                    logger.debug("Mirror %s not in the list of trusted networks, skipping" % (newuri))
                    continue

                # Create a local copy of the mirrors minus the current line
                # this will prevent us from recursively processing the same line
                # as well as indirect recursion A -> B -> C -> A
                localmirrors = list(mirrors)
                localmirrors.remove(line)

                try:
                    newud = FetchData(newuri, ld)
                    newud.setup_localpath(ld)
                except bb.fetch2.BBFetchException as e:
                    logger.debug("Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
                    logger.debug(str(e))
                    try:
                        # setup_localpath of file:// urls may fail, we should still see
                        # if mirrors of the url exist
                        adduri(newud, uris, uds, localmirrors, tarballs)
                    except UnboundLocalError:
                        # newud may be unbound if FetchData() itself raised.
                        pass
                    continue
                uris.append(newuri)
                uds.append(newud)

                adduri(newud, uris, uds, localmirrors, tarballs)

    # Seed the recursion with the original url; [None] means "no mirror
    # tarball names to substitute".
    adduri(origud, uris, uds, mirrors, origud.mirrortarballs or [None])

    return uris, uds
959
def rename_bad_checksum(ud, suffix):
    """Move a download with a bad checksum out of the way.

    The file at ud.localpath is renamed with a suffix built from the
    offending checksum so a fresh download can take its place.  A warning
    is emitted either way; failure to rename is not fatal.
    """
    if ud.localpath is None:
        return

    quarantined = "%s_bad-checksum_%s" % (ud.localpath, suffix)
    bb.warn("Renaming %s to %s" % (ud.localpath, quarantined))
    if not bb.utils.movefile(ud.localpath, quarantined):
        bb.warn("Renaming %s to %s failed, grep movefile in log.do_fetch to see why" % (ud.localpath, quarantined))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500972
973
def try_mirror_url(fetch, origud, ud, ld, check = False):
    """Attempt to fetch (or just check) one mirror candidate.

    Return of None or a value means we're finished; False means try
    another url.  Raises for network-policy violations and missing
    checksums, which should abort the whole mirror search.
    """
    # Take the candidate's lock unless it is the same lock the original
    # url already holds.  NOTE(review): 'lf' is only bound under this
    # condition; the identical condition in 'finally' below relies on
    # ud.lockfile not changing in between.
    if ud.lockfile and ud.lockfile != origud.lockfile:
        lf = bb.utils.lockfile(ud.lockfile)

    try:
        if check:
            # Check-only mode: report whether the mirror has the file.
            found = ud.method.checkstatus(fetch, ud, ld)
            if found:
                return found
            return False

        if not verify_donestamp(ud, ld, origud) or ud.method.need_update(ud, ld):
            ud.method.download(ud, ld)
            if hasattr(ud.method,"build_mirror_data"):
                ud.method.build_mirror_data(ud, ld)

        if not ud.localpath or not os.path.exists(ud.localpath):
            return False

        if ud.localpath == origud.localpath:
            return ud.localpath

        # We may be obtaining a mirror tarball which needs further processing by the real fetcher
        # If that tarball is a local file:// we need to provide a symlink to it
        dldir = ld.getVar("DL_DIR")

        if origud.mirrortarballs and os.path.basename(ud.localpath) in origud.mirrortarballs and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
            # Create donestamp in old format to avoid triggering a re-download
            if ud.donestamp:
                bb.utils.mkdirhier(os.path.dirname(ud.donestamp))
                open(ud.donestamp, 'w').close()
            dest = os.path.join(dldir, os.path.basename(ud.localpath))
            if not os.path.exists(dest):
                # In case this is executing without any file locks held (as is
                # the case for file:// URLs), two tasks may end up here at the
                # same time, in which case we do not want the second task to
                # fail when the link has already been created by the first task.
                try:
                    os.symlink(ud.localpath, dest)
                except FileExistsError:
                    pass
            # Let the original fetcher unpack/process the mirror tarball.
            if not verify_donestamp(origud, ld) or origud.method.need_update(origud, ld):
                origud.method.download(origud, ld)
                if hasattr(origud.method, "build_mirror_data"):
                    origud.method.build_mirror_data(origud, ld)
            return origud.localpath
        # Otherwise the result is a local file:// and we symlink to it
        ensure_symlink(ud.localpath, origud.localpath)
        update_stamp(origud, ld)
        return ud.localpath

    except bb.fetch2.NetworkAccess:
        raise

    except IOError as e:
        if e.errno in [errno.ESTALE]:
            # Stale NFS handle: treat as a soft failure and move on.
            logger.warning("Stale Error Observed %s." % ud.url)
            return False
        raise

    except bb.fetch2.BBFetchException as e:
        if isinstance(e, ChecksumError):
            logger.warning("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (ud.url, origud.url))
            logger.warning(str(e))
            if os.path.exists(ud.localpath):
                rename_bad_checksum(ud, e.checksum)
        elif isinstance(e, NoChecksumError):
            raise
        else:
            logger.debug("Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
            logger.debug(str(e))
        try:
            ud.method.clean(ud, ld)
        except UnboundLocalError:
            pass
        return False
    finally:
        if ud.lockfile and ud.lockfile != origud.lockfile:
            bb.utils.unlockfile(lf)
1056
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001057
def ensure_symlink(target, link_name):
    """Create link_name as a symlink pointing at target, if not already present.

    A dangling symlink at link_name is removed and recreated.  An existing
    (resolvable) path at link_name is left untouched.
    """
    if os.path.exists(link_name):
        return

    # os.path.exists() follows symlinks, so reaching here with islink()
    # true means the link is broken — remove it so it can be recreated.
    if os.path.islink(link_name):
        os.unlink(link_name)

    # In case this is executing without any file locks held (as is
    # the case for file:// URLs), two tasks may end up here at the
    # same time, in which case we do not want the second task to
    # fail when the link has already been created by the first task.
    try:
        os.symlink(target, link_name)
    except FileExistsError:
        pass
1072
1073
def try_mirrors(fetch, d, origud, mirrors, check = False):
    """
    Try to use a mirrored version of the sources.
    This method will be automatically called before the fetchers go.

    d Is a bb.data instance
    uri is the original uri we're trying to download
    mirrors is the list of mirrors we're going to try
    """
    ld = d.createCopy()

    uris, uds = build_mirroruris(origud, mirrors, ld)

    # Try each candidate in order; the first truthy result wins.
    for uri, ud in zip(uris, uds):
        result = try_mirror_url(fetch, origud, ud, ld, check)
        if result:
            return result
    return None
1092
def trusted_network(d, url):
    """
    Use a trusted url during download if networking is enabled and
    BB_ALLOWED_NETWORKS is set globally or for a specific recipe.
    Note: modifies SRC_URI & mirrors.
    """
    # With networking disabled everything is "trusted" — nothing is fetched.
    if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")):
        return True

    # Recipe-specific allow-list takes precedence over the global one.
    pkgname = d.expand(d.getVar('PN', False))
    trusted_hosts = None
    if pkgname:
        trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False)

    if not trusted_hosts:
        trusted_hosts = d.getVar('BB_ALLOWED_NETWORKS')

    # Not enabled.
    if not trusted_hosts:
        return True

    scheme, network, path, user, passwd, param = decodeurl(url)

    if not network:
        return True

    # Strip any port and normalise case before matching.
    network = network.split(':')[0].lower()

    # A "*.example.com" entry matches any subdomain including the bare host.
    return any(
        host == network or
        (host.startswith("*.") and ("." + network).endswith(host[1:]))
        for host in (h.lower() for h in trusted_hosts.split(" "))
    )
1130
def srcrev_internal_helper(ud, d, name):
    """
    Return the source revision to use for the given url/name:
        a) the revision from a ;rev= or ;tag= url parameter, if present
        b) the SRCREV value from the datastore (most specific key wins)
        c) the latest upstream revision when SRCREV is "AUTOINC"
    Raises FetchError when both ;rev= and ;tag= are given, when the
    datastore and url revisions conflict, or when no valid SRCREV is set.
    """

    srcrev = None
    pn = d.getVar("PN")
    # Candidate SRCREV variable names, most specific first.
    attempts = []
    if name != '' and pn:
        attempts.append("SRCREV_%s_pn-%s" % (name, pn))
    if name != '':
        attempts.append("SRCREV_%s" % name)
    if pn:
        attempts.append("SRCREV_pn-%s" % pn)
    attempts.append("SRCREV")

    for a in attempts:
        srcrev = d.getVar(a)
        if srcrev and srcrev != "INVALID":
            break

    if 'rev' in ud.parm and 'tag' in ud.parm:
        raise FetchError("Please specify a ;rev= parameter or a ;tag= parameter in the url %s but not both." % (ud.url))

    if 'rev' in ud.parm or 'tag' in ud.parm:
        if 'rev' in ud.parm:
            parmrev = ud.parm['rev']
        else:
            parmrev = ud.parm['tag']
        # A url-supplied revision wins when SRCREV is unset/INVALID, and
        # must agree with SRCREV when both are given.
        if srcrev == "INVALID" or not srcrev:
            return parmrev
        if srcrev != parmrev:
            raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please specify one valid value" % (srcrev, parmrev))
        return parmrev

    if srcrev == "INVALID" or not srcrev:
        raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url)
    if srcrev == "AUTOINC":
        # Resolve the placeholder to the actual latest upstream revision.
        srcrev = ud.method.latest_revision(ud, d, name)

    return srcrev
1175
def get_checksum_file_list(d):
    """ Get a list of files checksum in SRC_URI

    Returns the resolved local paths of all local file entries in
    SRC_URI as a space-separated string; each entry is suffixed with
    ":True" or ":False" depending on whether the file exists.
    """
    fetch = Fetch([], d, cache = False, localonly = True)

    dl_dir = d.getVar('DL_DIR')
    filelist = []
    for u in fetch.urls:
        ud = fetch.ud[u]

        if ud and isinstance(ud.method, local.Local):
            paths = ud.method.localpaths(ud, d)
            for f in paths:
                # (dead local 'pth = ud.decodedurl' removed — it was
                # assigned but never read)
                if f.startswith(dl_dir):
                    # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
                    if os.path.exists(f):
                        bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN'), os.path.basename(f)))
                    else:
                        bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN'), os.path.basename(f)))
                filelist.append(f + ":" + str(os.path.exists(f)))

    return " ".join(filelist)
1202
def get_file_checksums(filelist, pn, localdirsexclude):
    """Return the checksums for a list of local files.

    Results come from (and are stored in) the module-level file checksum
    cache, so repeated queries for the same files are cheap.
    """
    return _checksum_cache.get_checksums(filelist, pn, localdirsexclude)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001211
1212
class FetchData(object):
    """
    A class which represents the fetcher state for a given URI.

    Decodes the url into its components, resolves credentials and
    checksum expectations from the url parameters / datastore, selects
    the fetch method that supports the url and derives the donestamp
    and lockfile paths under DL_DIR.
    """
    def __init__(self, url, d, localonly = False):
        # localpath is the location of a downloaded result. If not set, the file is local.
        self.donestamp = None
        # Whether a .done stamp file should be maintained for this url.
        self.needdonestamp = True
        self.localfile = ""
        self.localpath = None
        self.lockfile = None
        # Names of mirror tarballs this url may be satisfied from.
        self.mirrortarballs = []
        self.basename = None
        self.basepath = None
        # Split the (expanded) url into its constituent parts.
        (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(d.expand(url))
        self.date = self.getSRCDate(d)
        self.url = url
        # Credentials may also be given as url parameters; they do not
        # override ones embedded in the url itself.
        if not self.user and "user" in self.parm:
            self.user = self.parm["user"]
        if not self.pswd and "pswd" in self.parm:
            self.pswd = self.parm["pswd"]
        self.setup = False

        def configure_checksum(checksum_id):
            """Set self.<id>_name and self.<id>_expected for one checksum type.

            The expected value comes from a url parameter if present,
            otherwise (for remote-file schemes) from the SRC_URI varflag.
            """
            if "name" in self.parm:
                checksum_name = "%s.%ssum" % (self.parm["name"], checksum_id)
            else:
                checksum_name = "%ssum" % checksum_id

            setattr(self, "%s_name" % checksum_id, checksum_name)

            if checksum_name in self.parm:
                checksum_expected = self.parm[checksum_name]
            elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3", "az"]:
                # Checksums are only expected for downloadable file schemes.
                checksum_expected = None
            else:
                checksum_expected = d.getVarFlag("SRC_URI", checksum_name)

            setattr(self, "%s_expected" % checksum_id, checksum_expected)

        for checksum_id in CHECKSUM_LIST:
            configure_checksum(checksum_id)

        self.ignore_checksums = False

        self.names = self.parm.get("name",'default').split(',')

        # Probe the registered fetch methods; first one claiming support wins.
        self.method = None
        for m in methods:
            if m.supports(self, d):
                self.method = m
                break

        if not self.method:
            raise NoMethodError(url)

        if localonly and not isinstance(self.method, local.Local):
            raise NonLocalMethod()

        # Accept the legacy ;proto= parameter but steer users to ;protocol=.
        if self.parm.get("proto", None) and "protocol" not in self.parm:
            logger.warning('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN'))
            self.parm["protocol"] = self.parm.get("proto", None)

        if hasattr(self.method, "urldata_init"):
            self.method.urldata_init(self, d)

        if "localpath" in self.parm:
            # if user sets localpath for file, use it instead.
            self.localpath = self.parm["localpath"]
            self.basename = os.path.basename(self.localpath)
        elif self.localfile:
            self.localpath = self.method.localpath(self, d)

        dldir = d.getVar("DL_DIR")

        if not self.needdonestamp:
            return

        # Note: .done and .lock files should always be in DL_DIR whereas localpath may not be.
        if self.localpath and self.localpath.startswith(dldir):
            basepath = self.localpath
        elif self.localpath:
            basepath = dldir + os.sep + os.path.basename(self.localpath)
        elif self.basepath or self.basename:
            basepath = dldir + os.sep + (self.basepath or self.basename)
        else:
            bb.fatal("Can't determine lock path for url %s" % url)

        self.donestamp = basepath + '.done'
        self.lockfile = basepath + '.lock'

    def setup_revisions(self, d):
        """Resolve and store the source revision for each name of this url."""
        self.revisions = {}
        for name in self.names:
            self.revisions[name] = srcrev_internal_helper(self, d, name)

        # add compatibility code for non name specified case
        if len(self.names) == 1:
            self.revision = self.revisions[self.names[0]]

    def setup_localpath(self, d):
        """Ensure self.localpath is populated (lazily, via the fetch method)."""
        if not self.localpath:
            self.localpath = self.method.localpath(self, d)

    def getSRCDate(self, d):
        """
        Return the SRC Date for the component

        d the bb.data module

        Resolution order: the ;srcdate= url parameter, then the
        per-recipe SRCDATE_<PN>, then SRCDATE, then DATE.
        """
        if "srcdate" in self.parm:
            return self.parm['srcdate']

        pn = d.getVar("PN")

        if pn:
            return d.getVar("SRCDATE_%s" % pn) or d.getVar("SRCDATE") or d.getVar("DATE")

        return d.getVar("SRCDATE") or d.getVar("DATE")
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001332
1333class FetchMethod(object):
1334 """Base class for 'fetch'ing data"""
1335
    def __init__(self, urls=None):
        """Initialise with an empty url list.

        Note: the 'urls' argument is currently ignored; the list always
        starts empty (this assignment goes through the 'urls' property).
        """
        self.urls = []
1338
1339 def supports(self, urldata, d):
1340 """
1341 Check to see if this fetch class supports a given url.
1342 """
1343 return 0
1344
1345 def localpath(self, urldata, d):
1346 """
1347 Return the local filename of a given url assuming a successful fetch.
1348 Can also setup variables in urldata for use in go (saving code duplication
1349 and duplicate code execution)
1350 """
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001351 return os.path.join(d.getVar("DL_DIR"), urldata.localfile)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001352
1353 def supports_checksum(self, urldata):
1354 """
1355 Is localpath something that can be represented by a checksum?
1356 """
1357
1358 # We cannot compute checksums for directories
Andrew Geissler82c905d2020-04-13 13:39:40 -05001359 if os.path.isdir(urldata.localpath):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001360 return False
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001361 return True
1362
1363 def recommends_checksum(self, urldata):
1364 """
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001365 Is the backend on where checksumming is recommended (should warnings
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001366 be displayed if there is no checksum)?
1367 """
1368 return False
1369
Andrew Geissler82c905d2020-04-13 13:39:40 -05001370 def verify_donestamp(self, ud, d):
1371 """
1372 Verify the donestamp file
1373 """
1374 return verify_donestamp(ud, d)
1375
1376 def update_donestamp(self, ud, d):
1377 """
1378 Update the donestamp file
1379 """
1380 update_stamp(ud, d)
1381
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001382 def _strip_leading_slashes(self, relpath):
1383 """
1384 Remove leading slash as os.path.join can't cope
1385 """
1386 while os.path.isabs(relpath):
1387 relpath = relpath[1:]
1388 return relpath
1389
    def setUrls(self, urls):
        # Backing store for the 'urls' property (name-mangled to _FetchMethod__urls).
        self.__urls = urls

    def getUrls(self):
        return self.__urls

    # Expose get/set access as the 'urls' attribute.
    urls = property(getUrls, setUrls, None, "Urls property")
1397
1398 def need_update(self, ud, d):
1399 """
1400 Force a fetch, even if localpath exists?
1401 """
1402 if os.path.exists(ud.localpath):
1403 return False
1404 return True
1405
1406 def supports_srcrev(self):
1407 """
1408 The fetcher supports auto source revisions (SRCREV)
1409 """
1410 return False
1411
1412 def download(self, urldata, d):
1413 """
1414 Fetch urls
1415 Assumes localpath was called first
1416 """
Brad Bishop19323692019-04-05 15:28:33 -04001417 raise NoMethodError(urldata.url)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001418
1419 def unpack(self, urldata, rootdir, data):
1420 iterate = False
1421 file = urldata.localpath
1422
1423 try:
1424 unpack = bb.utils.to_boolean(urldata.parm.get('unpack'), True)
1425 except ValueError as exc:
1426 bb.fatal("Invalid value for 'unpack' parameter for %s: %s" %
1427 (file, urldata.parm.get('unpack')))
1428
1429 base, ext = os.path.splitext(file)
1430 if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz']:
1431 efile = os.path.join(rootdir, os.path.basename(base))
1432 else:
1433 efile = file
1434 cmd = None
1435
1436 if unpack:
1437 if file.endswith('.tar'):
1438 cmd = 'tar x --no-same-owner -f %s' % file
1439 elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
1440 cmd = 'tar xz --no-same-owner -f %s' % file
1441 elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
1442 cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
1443 elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
1444 cmd = 'gzip -dc %s > %s' % (file, efile)
1445 elif file.endswith('.bz2'):
1446 cmd = 'bzip2 -dc %s > %s' % (file, efile)
Brad Bishop316dfdd2018-06-25 12:45:53 -04001447 elif file.endswith('.txz') or file.endswith('.tar.xz'):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001448 cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
1449 elif file.endswith('.xz'):
1450 cmd = 'xz -dc %s > %s' % (file, efile)
1451 elif file.endswith('.tar.lz'):
1452 cmd = 'lzip -dc %s | tar x --no-same-owner -f -' % file
1453 elif file.endswith('.lz'):
1454 cmd = 'lzip -dc %s > %s' % (file, efile)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001455 elif file.endswith('.tar.7z'):
1456 cmd = '7z x -so %s | tar x --no-same-owner -f -' % file
1457 elif file.endswith('.7z'):
1458 cmd = '7za x -y %s 1>/dev/null' % file
Andrew Geissler6ce62a22020-11-30 19:58:47 -06001459 elif file.endswith('.tzst') or file.endswith('.tar.zst'):
1460 cmd = 'zstd --decompress --stdout %s | tar x --no-same-owner -f -' % file
1461 elif file.endswith('.zst'):
1462 cmd = 'zstd --decompress --stdout %s > %s' % (file, efile)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001463 elif file.endswith('.zip') or file.endswith('.jar'):
1464 try:
1465 dos = bb.utils.to_boolean(urldata.parm.get('dos'), False)
1466 except ValueError as exc:
1467 bb.fatal("Invalid value for 'dos' parameter for %s: %s" %
1468 (file, urldata.parm.get('dos')))
1469 cmd = 'unzip -q -o'
1470 if dos:
1471 cmd = '%s -a' % cmd
1472 cmd = "%s '%s'" % (cmd, file)
1473 elif file.endswith('.rpm') or file.endswith('.srpm'):
1474 if 'extract' in urldata.parm:
1475 unpack_file = urldata.parm.get('extract')
1476 cmd = 'rpm2cpio.sh %s | cpio -id %s' % (file, unpack_file)
1477 iterate = True
1478 iterate_file = unpack_file
1479 else:
1480 cmd = 'rpm2cpio.sh %s | cpio -id' % (file)
1481 elif file.endswith('.deb') or file.endswith('.ipk'):
Brad Bishopa5c52ff2018-11-23 10:55:50 +13001482 output = subprocess.check_output(['ar', '-t', file], preexec_fn=subprocess_setup)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001483 datafile = None
1484 if output:
1485 for line in output.decode().splitlines():
1486 if line.startswith('data.tar.'):
1487 datafile = line
1488 break
1489 else:
1490 raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar.* file", urldata.url)
1491 else:
1492 raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url)
1493 cmd = 'ar x %s %s && tar --no-same-owner -xpf %s && rm %s' % (file, datafile, datafile, datafile)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001494
1495 # If 'subdir' param exists, create a dir and use it as destination for unpack cmd
1496 if 'subdir' in urldata.parm:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001497 subdir = urldata.parm.get('subdir')
1498 if os.path.isabs(subdir):
1499 if not os.path.realpath(subdir).startswith(os.path.realpath(rootdir)):
1500 raise UnpackError("subdir argument isn't a subdirectory of unpack root %s" % rootdir, urldata.url)
1501 unpackdir = subdir
1502 else:
1503 unpackdir = os.path.join(rootdir, subdir)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001504 bb.utils.mkdirhier(unpackdir)
1505 else:
1506 unpackdir = rootdir
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001507
1508 if not unpack or not cmd:
1509 # If file == dest, then avoid any copies, as we already put the file into dest!
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001510 dest = os.path.join(unpackdir, os.path.basename(file))
1511 if file != dest and not (os.path.exists(dest) and os.path.samefile(file, dest)):
1512 destdir = '.'
1513 # For file:// entries all intermediate dirs in path must be created at destination
1514 if urldata.type == "file":
1515 # Trailing '/' does a copying to wrong place
1516 urlpath = urldata.path.rstrip('/')
1517 # Want files places relative to cwd so no leading '/'
1518 urlpath = urlpath.lstrip('/')
1519 if urlpath.find("/") != -1:
1520 destdir = urlpath.rsplit("/", 1)[0] + '/'
1521 bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir))
Andrew Geisslerc3d88e42020-10-02 09:45:00 -05001522 cmd = 'cp -fpPRH "%s" "%s"' % (file, destdir)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001523
1524 if not cmd:
1525 return
1526
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001527 path = data.getVar('PATH')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001528 if path:
1529 cmd = "PATH=\"%s\" %s" % (path, cmd)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001530 bb.note("Unpacking %s to %s/" % (file, unpackdir))
1531 ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True, cwd=unpackdir)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001532
1533 if ret != 0:
1534 raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url)
1535
1536 if iterate is True:
1537 iterate_urldata = urldata
1538 iterate_urldata.localpath = "%s/%s" % (rootdir, iterate_file)
1539 self.unpack(urldata, rootdir, data)
1540
1541 return
1542
1543 def clean(self, urldata, d):
1544 """
1545 Clean any existing full or partial download
1546 """
1547 bb.utils.remove(urldata.localpath)
1548
1549 def try_premirror(self, urldata, d):
1550 """
1551 Should premirrors be used?
1552 """
1553 return True
1554
Andrew Geissler82c905d2020-04-13 13:39:40 -05001555 def try_mirrors(self, fetch, urldata, d, mirrors, check=False):
1556 """
1557 Try to use a mirror
1558 """
1559 return bool(try_mirrors(fetch, d, urldata, mirrors, check))
1560
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001561 def checkstatus(self, fetch, urldata, d):
1562 """
1563 Check the status of a URL
1564 Assumes localpath was called first
1565 """
Brad Bishop19323692019-04-05 15:28:33 -04001566 logger.info("URL %s could not be checked for status since no method exists.", urldata.url)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001567 return True
1568
1569 def latest_revision(self, ud, d, name):
1570 """
1571 Look in the cache for the latest revision, if not present ask the SCM.
1572 """
1573 if not hasattr(self, "_latest_revision"):
Brad Bishop19323692019-04-05 15:28:33 -04001574 raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001575
1576 revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
1577 key = self.generate_revision_key(ud, d, name)
1578 try:
1579 return revs[key]
1580 except KeyError:
1581 revs[key] = rev = self._latest_revision(ud, d, name)
1582 return rev
1583
1584 def sortable_revision(self, ud, d, name):
1585 latest_rev = self._build_revision(ud, d, name)
1586 return True, str(latest_rev)
1587
    def generate_revision_key(self, ud, d, name):
        """Build the key used to cache this URL's head revision persistently."""
        return self._revision_key(ud, d, name)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001590
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001591 def latest_versionstring(self, ud, d):
1592 """
1593 Compute the latest release name like "x.y.x" in "x.y.x+gitHASH"
1594 by searching through the tags output of ls-remote, comparing
1595 versions and returning the highest match as a (version, revision) pair.
1596 """
1597 return ('', '')
1598
Andrew Geissler82c905d2020-04-13 13:39:40 -05001599 def done(self, ud, d):
1600 """
1601 Is the download done ?
1602 """
1603 if os.path.exists(ud.localpath):
1604 return True
Andrew Geissler82c905d2020-04-13 13:39:40 -05001605 return False
1606
Andrew Geissler4ed12e12020-06-05 18:00:41 -05001607 def implicit_urldata(self, ud, d):
1608 """
1609 Get a list of FetchData objects for any implicit URLs that will also
1610 be downloaded when we fetch the given URL.
1611 """
1612 return []
1613
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001614class Fetch(object):
1615 def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None):
1616 if localonly and cache:
1617 raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")
1618
1619 if len(urls) == 0:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001620 urls = d.getVar("SRC_URI").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001621 self.urls = urls
1622 self.d = d
1623 self.ud = {}
1624 self.connection_cache = connection_cache
1625
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001626 fn = d.getVar('FILE')
1627 mc = d.getVar('__BBMULTICONFIG') or ""
Andrew Geissler82c905d2020-04-13 13:39:40 -05001628 key = None
1629 if cache and fn:
1630 key = mc + fn + str(id(d))
1631 if key in urldata_cache:
1632 self.ud = urldata_cache[key]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001633
1634 for url in urls:
1635 if url not in self.ud:
1636 try:
1637 self.ud[url] = FetchData(url, d, localonly)
1638 except NonLocalMethod:
1639 if localonly:
1640 self.ud[url] = None
1641 pass
1642
Andrew Geissler82c905d2020-04-13 13:39:40 -05001643 if key:
1644 urldata_cache[key] = self.ud
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001645
1646 def localpath(self, url):
1647 if url not in self.urls:
1648 self.ud[url] = FetchData(url, self.d)
1649
1650 self.ud[url].setup_localpath(self.d)
1651 return self.d.expand(self.ud[url].localpath)
1652
1653 def localpaths(self):
1654 """
1655 Return a list of the local filenames, assuming successful fetch
1656 """
1657 local = []
1658
1659 for u in self.urls:
1660 ud = self.ud[u]
1661 ud.setup_localpath(self.d)
1662 local.append(ud.localpath)
1663
1664 return local
1665
1666 def download(self, urls=None):
1667 """
1668 Fetch all urls
1669 """
1670 if not urls:
1671 urls = self.urls
1672
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001673 network = self.d.getVar("BB_NO_NETWORK")
Brad Bishop19323692019-04-05 15:28:33 -04001674 premirroronly = bb.utils.to_boolean(self.d.getVar("BB_FETCH_PREMIRRORONLY"))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001675
1676 for u in urls:
1677 ud = self.ud[u]
1678 ud.setup_localpath(self.d)
1679 m = ud.method
Andrew Geissler82c905d2020-04-13 13:39:40 -05001680 done = False
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001681
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001682 if ud.lockfile:
1683 lf = bb.utils.lockfile(ud.lockfile)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001684
1685 try:
1686 self.d.setVar("BB_NO_NETWORK", network)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001687
Andrew Geissler82c905d2020-04-13 13:39:40 -05001688 if m.verify_donestamp(ud, self.d) and not m.need_update(ud, self.d):
1689 done = True
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001690 elif m.try_premirror(ud, self.d):
Andrew Geisslerd1e89492021-02-12 15:35:20 -06001691 logger.debug("Trying PREMIRRORS")
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001692 mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
Andrew Geissler82c905d2020-04-13 13:39:40 -05001693 done = m.try_mirrors(self, ud, self.d, mirrors)
1694 if done:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001695 try:
1696 # early checksum verification so that if the checksum of the premirror
1697 # contents mismatch the fetcher can still try upstream and mirrors
Andrew Geissler82c905d2020-04-13 13:39:40 -05001698 m.update_donestamp(ud, self.d)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001699 except ChecksumError as e:
1700 logger.warning("Checksum failure encountered with premirror download of %s - will attempt other sources." % u)
Andrew Geisslerd1e89492021-02-12 15:35:20 -06001701 logger.debug(str(e))
Andrew Geissler82c905d2020-04-13 13:39:40 -05001702 done = False
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001703
1704 if premirroronly:
1705 self.d.setVar("BB_NO_NETWORK", "1")
1706
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001707 firsterr = None
Andrew Geissler82c905d2020-04-13 13:39:40 -05001708 verified_stamp = m.verify_donestamp(ud, self.d)
1709 if not done and (not verified_stamp or m.need_update(ud, self.d)):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001710 try:
1711 if not trusted_network(self.d, ud.url):
1712 raise UntrustedUrl(ud.url)
Andrew Geisslerd1e89492021-02-12 15:35:20 -06001713 logger.debug("Trying Upstream")
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001714 m.download(ud, self.d)
1715 if hasattr(m, "build_mirror_data"):
1716 m.build_mirror_data(ud, self.d)
Andrew Geissler82c905d2020-04-13 13:39:40 -05001717 done = True
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001718 # early checksum verify, so that if checksum mismatched,
1719 # fetcher still have chance to fetch from mirror
Andrew Geissler82c905d2020-04-13 13:39:40 -05001720 m.update_donestamp(ud, self.d)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001721
1722 except bb.fetch2.NetworkAccess:
1723 raise
1724
1725 except BBFetchException as e:
1726 if isinstance(e, ChecksumError):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001727 logger.warning("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
Andrew Geisslerd1e89492021-02-12 15:35:20 -06001728 logger.debug(str(e))
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001729 if os.path.exists(ud.localpath):
1730 rename_bad_checksum(ud, e.checksum)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001731 elif isinstance(e, NoChecksumError):
1732 raise
1733 else:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001734 logger.warning('Failed to fetch URL %s, attempting MIRRORS if available' % u)
Andrew Geisslerd1e89492021-02-12 15:35:20 -06001735 logger.debug(str(e))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001736 firsterr = e
1737 # Remove any incomplete fetch
1738 if not verified_stamp:
1739 m.clean(ud, self.d)
Andrew Geisslerd1e89492021-02-12 15:35:20 -06001740 logger.debug("Trying MIRRORS")
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001741 mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
Andrew Geissler82c905d2020-04-13 13:39:40 -05001742 done = m.try_mirrors(self, ud, self.d, mirrors)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001743
Andrew Geissler82c905d2020-04-13 13:39:40 -05001744 if not done or not m.done(ud, self.d):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001745 if firsterr:
1746 logger.error(str(firsterr))
1747 raise FetchError("Unable to fetch URL from any source.", u)
1748
Andrew Geissler82c905d2020-04-13 13:39:40 -05001749 m.update_donestamp(ud, self.d)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001750
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001751 except IOError as e:
Brad Bishop19323692019-04-05 15:28:33 -04001752 if e.errno in [errno.ESTALE]:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001753 logger.error("Stale Error Observed %s." % u)
1754 raise ChecksumError("Stale Error Detected")
1755
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001756 except BBFetchException as e:
1757 if isinstance(e, ChecksumError):
1758 logger.error("Checksum failure fetching %s" % u)
1759 raise
1760
1761 finally:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001762 if ud.lockfile:
1763 bb.utils.unlockfile(lf)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001764
1765 def checkstatus(self, urls=None):
1766 """
1767 Check all urls exist upstream
1768 """
1769
1770 if not urls:
1771 urls = self.urls
1772
1773 for u in urls:
1774 ud = self.ud[u]
1775 ud.setup_localpath(self.d)
1776 m = ud.method
Andrew Geisslerd1e89492021-02-12 15:35:20 -06001777 logger.debug("Testing URL %s", u)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001778 # First try checking uri, u, from PREMIRRORS
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001779 mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
Andrew Geissler82c905d2020-04-13 13:39:40 -05001780 ret = m.try_mirrors(self, ud, self.d, mirrors, True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001781 if not ret:
1782 # Next try checking from the original uri, u
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001783 ret = m.checkstatus(self, ud, self.d)
1784 if not ret:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001785 # Finally, try checking uri, u, from MIRRORS
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001786 mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
Andrew Geissler82c905d2020-04-13 13:39:40 -05001787 ret = m.try_mirrors(self, ud, self.d, mirrors, True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001788
1789 if not ret:
1790 raise FetchError("URL %s doesn't work" % u, u)
1791
1792 def unpack(self, root, urls=None):
1793 """
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001794 Unpack urls to root
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001795 """
1796
1797 if not urls:
1798 urls = self.urls
1799
1800 for u in urls:
1801 ud = self.ud[u]
1802 ud.setup_localpath(self.d)
1803
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001804 if ud.lockfile:
1805 lf = bb.utils.lockfile(ud.lockfile)
1806
1807 ud.method.unpack(ud, root, self.d)
1808
1809 if ud.lockfile:
1810 bb.utils.unlockfile(lf)
1811
1812 def clean(self, urls=None):
1813 """
1814 Clean files that the fetcher gets or places
1815 """
1816
1817 if not urls:
1818 urls = self.urls
1819
1820 for url in urls:
1821 if url not in self.ud:
Brad Bishop19323692019-04-05 15:28:33 -04001822 self.ud[url] = FetchData(url, self.d)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001823 ud = self.ud[url]
1824 ud.setup_localpath(self.d)
1825
1826 if not ud.localfile and ud.localpath is None:
1827 continue
1828
1829 if ud.lockfile:
1830 lf = bb.utils.lockfile(ud.lockfile)
1831
1832 ud.method.clean(ud, self.d)
1833 if ud.donestamp:
1834 bb.utils.remove(ud.donestamp)
1835
1836 if ud.lockfile:
1837 bb.utils.unlockfile(lf)
1838
Andrew Geissler4ed12e12020-06-05 18:00:41 -05001839 def expanded_urldata(self, urls=None):
1840 """
1841 Get an expanded list of FetchData objects covering both the given
1842 URLS and any additional implicit URLs that are added automatically by
1843 the appropriate FetchMethod.
1844 """
1845
1846 if not urls:
1847 urls = self.urls
1848
1849 urldata = []
1850 for url in urls:
1851 ud = self.ud[url]
1852 urldata.append(ud)
1853 urldata += ud.method.implicit_urldata(ud, self.d)
1854
1855 return urldata
1856
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001857class FetchConnectionCache(object):
1858 """
1859 A class which represents an container for socket connections.
1860 """
1861 def __init__(self):
1862 self.cache = {}
1863
1864 def get_connection_name(self, host, port):
1865 return host + ':' + str(port)
1866
1867 def add_connection(self, host, port, connection):
1868 cn = self.get_connection_name(host, port)
1869
1870 if cn not in self.cache:
1871 self.cache[cn] = connection
1872
1873 def get_connection(self, host, port):
1874 connection = None
1875
1876 cn = self.get_connection_name(host, port)
1877 if cn in self.cache:
1878 connection = self.cache[cn]
1879
1880 return connection
1881
1882 def remove_connection(self, host, port):
1883 cn = self.get_connection_name(host, port)
1884 if cn in self.cache:
1885 self.cache[cn].close()
1886 del self.cache[cn]
1887
1888 def close_connections(self):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001889 for cn in list(self.cache.keys()):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001890 self.cache[cn].close()
1891 del self.cache[cn]
1892
1893from . import cvs
1894from . import git
1895from . import gitsm
1896from . import gitannex
1897from . import local
1898from . import svn
1899from . import wget
1900from . import ssh
1901from . import sftp
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001902from . import s3
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001903from . import perforce
1904from . import bzr
1905from . import hg
1906from . import osc
1907from . import repo
1908from . import clearcase
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001909from . import npm
Andrew Geissler82c905d2020-04-13 13:39:40 -05001910from . import npmsw
Andrew Geissler95ac1b82021-03-31 14:34:31 -05001911from . import az
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001912
# Instantiate one fetcher per supported URL scheme and register it in the
# module-level 'methods' list, which FetchData consults to resolve a URL
# to its FetchMethod implementation.
methods.append(local.Local())
methods.append(wget.Wget())
methods.append(svn.Svn())
methods.append(git.Git())
methods.append(gitsm.GitSM())
methods.append(gitannex.GitANNEX())
methods.append(cvs.Cvs())
methods.append(ssh.SSH())
methods.append(sftp.SFTP())
methods.append(s3.S3())
methods.append(perforce.Perforce())
methods.append(bzr.Bzr())
methods.append(hg.Hg())
methods.append(osc.Osc())
methods.append(repo.Repo())
methods.append(clearcase.ClearCase())
methods.append(npm.Npm())
methods.append(npmsw.NpmShrinkWrap())
methods.append(az.Az())