blob: 666cc1306a0a0266a2b6f57a7923400fe2fab542 [file] [log] [blame]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001"""
2BitBake 'Fetch' implementations
3
4Classes for obtaining upstream sources for the
5BitBake build tools.
6"""
7
8# Copyright (C) 2003, 2004 Chris Larson
9# Copyright (C) 2012 Intel Corporation
10#
Brad Bishopc342db32019-05-15 21:57:59 -040011# SPDX-License-Identifier: GPL-2.0-only
Patrick Williamsc124f4f2015-09-15 14:41:29 -050012#
13# Based on functions from the base bb module, Copyright 2003 Holger Schurig
14
Patrick Williamsc124f4f2015-09-15 14:41:29 -050015import os, re
16import signal
Patrick Williamsc124f4f2015-09-15 14:41:29 -050017import logging
Patrick Williamsc0f7c042017-02-23 20:41:17 -060018import urllib.request, urllib.parse, urllib.error
19if 'git' not in urllib.parse.uses_netloc:
20 urllib.parse.uses_netloc.append('git')
21import operator
22import collections
23import subprocess
24import pickle
Brad Bishop6e60e8b2018-02-01 10:27:11 -050025import errno
Patrick Williamsc124f4f2015-09-15 14:41:29 -050026import bb.persist_data, bb.utils
27import bb.checksum
Patrick Williamsc124f4f2015-09-15 14:41:29 -050028import bb.process
Brad Bishopd7bf8c12018-02-25 22:55:05 -050029import bb.event
Patrick Williamsc124f4f2015-09-15 14:41:29 -050030
# Fetcher implementation version string.
# NOTE(review): consumers of __version__ are not visible in this chunk.
__version__ = "2"

# Process-wide cache of file checksums shared by all fetchers.
_checksum_cache = bb.checksum.FileChecksumCache()

logger = logging.getLogger("BitBake.Fetcher")

# Every checksum algorithm the fetcher understands.
CHECKSUM_LIST = [ "md5", "sha256", "sha1", "sha384", "sha512" ]
# The subset suggested to users in "add this to your recipe" messages.
SHOWN_CHECKSUM_LIST = ["sha256"]
38
class BBFetchException(Exception):
    """Base class all fetch exceptions inherit from."""
    def __init__(self, message):
        self.msg = message
        super().__init__(message)

    def __str__(self):
        return self.msg
class UntrustedUrl(BBFetchException):
    """Raised for a host that is not listed in BB_ALLOWED_NETWORKS."""
    def __init__(self, url, message=''):
        # Fall back to a standard message when the caller supplies none.
        msg = message or "The URL: '%s' is not trusted and cannot be used" % url
        self.url = url
        super().__init__(msg)
        self.args = (url,)
58
class MalformedUrl(BBFetchException):
    """Raised when a URL cannot be parsed at all."""
    def __init__(self, url, message=''):
        # Fall back to a standard message when the caller supplies none.
        msg = message or "The URL: '%s' is invalid and cannot be interpreted" % url
        self.url = url
        super().__init__(msg)
        self.args = (url,)
Patrick Williamsc124f4f2015-09-15 14:41:29 -050069
class FetchError(BBFetchException):
    """General fetcher exception when something happens incorrectly."""
    def __init__(self, message, url = None):
        # Include the URL in the message when one is known.
        if url:
            full = "Fetcher failure for URL: '%s'. %s" % (url, message)
        else:
            full = "Fetcher failure: %s" % message
        self.url = url
        super().__init__(full)
        self.args = (message, url)
Patrick Williamsc124f4f2015-09-15 14:41:29 -050080
class ChecksumError(FetchError):
    """Raised when a mismatched checksum is encountered."""
    def __init__(self, message, url = None, checksum = None):
        # Keep the offending checksum so callers can report or rename the file.
        self.checksum = checksum
        super().__init__(message, url)
86
class NoChecksumError(FetchError):
    """Raised when no checksum is specified but BB_STRICT_CHECKSUM is set."""
89
class UnpackError(BBFetchException):
    """General fetcher exception for failures while unpacking a URL."""
    def __init__(self, message, url):
        self.url = url
        super().__init__("Unpack failure for URL: '%s'. %s" % (url, message))
        self.args = (message, url)
Patrick Williamsc124f4f2015-09-15 14:41:29 -050097
class NoMethodError(BBFetchException):
    """Raised when no fetcher method supports the supplied URL(s)."""
    def __init__(self, url):
        self.url = url
        super().__init__("Could not find a fetcher which supports the URL: '%s'" % url)
        self.args = (url,)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500105
class MissingParameterError(BBFetchException):
    """Raised when a fetch method is missing a critical URL parameter."""
    def __init__(self, missing, url):
        self.url = url
        self.missing = missing
        super().__init__("URL: '%s' is missing the required parameter '%s'" % (url, missing))
        self.args = (missing, url)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500114
class ParameterError(BBFetchException):
    """Raised when a URL cannot be processed due to invalid parameters."""
    def __init__(self, message, url):
        self.url = url
        super().__init__("URL: '%s' has invalid parameters. %s" % (url, message))
        self.args = (message, url)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500122
class NetworkAccess(BBFetchException):
    """Raised when network access is disabled but would be required."""
    def __init__(self, url, cmd):
        self.url = url
        self.cmd = cmd
        super().__init__("Network access disabled through BB_NO_NETWORK (or set indirectly due to use of BB_FETCH_PREMIRRORONLY) but access requested with command %s (for url %s)" % (cmd, url))
        self.args = (url, cmd)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500131
class NonLocalMethod(Exception):
    """Marker exception carrying no message of its own."""
    def __init__(self):
        super().__init__()
135
class MissingChecksumEvent(bb.event.Event):
    """Event fired when a download has no checksum specified in the recipe
    (see verify_checksum); carries the computed checksums keyed by name."""
    def __init__(self, url, **checksums):
        self.url = url
        self.checksums = checksums
        super().__init__()
141
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500142
class URI(object):
    """
    A generic URI, parsed from (and stringifying back to) its string form.

    Construct it with a URI string, or build one up attribute by attribute:

        uri = URI("http://example.com/")

        uri = URI()
        uri.scheme = 'http'
        uri.hostname = 'example.com'
        uri.path = '/'

    Attributes:

    * scheme (read/write)
    * userinfo (authentication information) (read/write)
    * username (read/write)
    * password (read/write) -- note, password is deprecated as of RFC 3986
    * hostname (read/write)
    * port (read/write)
    * hostport (read only) -- "hostname:port" if both set, else "hostname"
    * path (read/write)
    * path_quoted (read/write) -- a URI-quoted version of path
    * params (dict) (read/write)
    * query (dict) (read/write)
    * relative (bool) (read only) -- True for a "relative URI" such as
      file:foo.diff

    For backwards compatibility with bitbake's historical URL handling this
    class deliberately accepts some non-IETF-compliant forms:

        file:relative/path.diff (IETF compliant)
        git:relative/path.git (IETF compliant)
        git:///absolute/path.git (IETF compliant)
        file:///absolute/path.diff (IETF compliant)

        file://relative/path.diff (not IETF compliant)

    It does not support:

        file://hostname/absolute/path.diff (would be IETF compliant)

    The last case only applies to schemes (currently only file://) that
    forbid a network location in their URIs.
    """

    # Schemes whose non-'//' form is treated as a relative URI.
    _relative_schemes = ['file', 'git']
    # Schemes that must never carry a network location.
    _netloc_forbidden = ['file']

    def __init__(self, uri=None):
        self.scheme = ''
        self.userinfo = ''
        self.hostname = ''
        self.port = None
        self._path = ''
        self.params = {}
        self.query = {}
        self.relative = False

        if not uri:
            return

        # BitBake's ';key=value' parameters are not quite RFC compliant, so
        # peel them off before handing the remainder to urllib.
        uri, param_str = (uri.split(";", 1) + [None])[:2]

        parsed = urllib.parse.urlparse(uri)
        self.scheme = parsed.scheme

        reparse = 0

        # Coerce urlparse to make URI scheme use netloc.
        # NOTE(review): this tests uses_netloc but appends to uses_params,
        # which looks inconsistent -- preserved as-is to avoid any
        # behaviour change; confirm against upstream intent.
        if not self.scheme in urllib.parse.uses_netloc:
            urllib.parse.uses_params.append(self.scheme)
            reparse = 1

        # Make urlparse happy(/ier) by converting local resources to the
        # RFC compliant form, e.g. file://foo.diff -> file:foo.diff
        if parsed.scheme in self._netloc_forbidden:
            uri = re.sub("(?<=:)//(?!/)", "", uri, 1)
            reparse = 1

        if reparse:
            parsed = urllib.parse.urlparse(uri)

        # Identify whether this is a relative URI (scheme: with no '//').
        if parsed.scheme in self._relative_schemes and \
           re.compile(r"^\w+:(?!//)").match(uri):
            self.relative = True

        if not self.relative:
            self.hostname = parsed.hostname or ''
            self.port = parsed.port

            self.userinfo += parsed.username or ''

            if parsed.password:
                self.userinfo += ':%s' % parsed.password

        self.path = urllib.parse.unquote(parsed.path)

        if param_str:
            self.params = self._param_str_split(param_str, ";")
        if parsed.query:
            self.query = self._param_str_split(parsed.query, "&")

    def __str__(self):
        userinfo = self.userinfo
        if userinfo:
            userinfo += '@'

        return "%s:%s%s%s%s%s%s" % (
            self.scheme,
            '' if self.relative else '//',
            userinfo,
            self.hostport,
            self.path_quoted,
            self._query_str(),
            self._param_str())

    def _param_str(self):
        # ';key=value;...' suffix, or the empty string when there are none.
        if not self.params:
            return ''
        return ';' + self._param_str_join(self.params, ";")

    def _query_str(self):
        # '?key=value&...' suffix, or the empty string when there are none.
        if not self.query:
            return ''
        return '?' + self._param_str_join(self.query, "&")

    def _param_str_split(self, string, elmdelim, kvdelim="="):
        # Preserve insertion order so the URI round-trips unchanged.
        ret = collections.OrderedDict()
        for pair in string.split(elmdelim):
            if not pair:
                continue
            key, value = pair.split(kvdelim, 1)
            ret[key] = value
        return ret

    def _param_str_join(self, dict_, elmdelim, kvdelim="="):
        return elmdelim.join(kvdelim.join((k, v)) for k, v in dict_.items())

    @property
    def hostport(self):
        if not self.port:
            return self.hostname
        return "%s:%d" % (self.hostname, self.port)

    @property
    def path_quoted(self):
        return urllib.parse.quote(self.path)

    @path_quoted.setter
    def path_quoted(self, path):
        self.path = urllib.parse.unquote(path)

    @property
    def path(self):
        return self._path

    @path.setter
    def path(self, path):
        self._path = path

        # An absolute (or empty) path makes the URI non-relative.
        if not path or re.compile("^/").match(path):
            self.relative = False
        else:
            self.relative = True

    @property
    def username(self):
        if self.userinfo:
            return self.userinfo.split(":", 1)[0]
        return ''

    @username.setter
    def username(self, username):
        # Retain any existing password component.
        password = self.password
        self.userinfo = username
        if password:
            self.userinfo += ":%s" % password

    @property
    def password(self):
        if self.userinfo and ":" in self.userinfo:
            return self.userinfo.split(":", 1)[1]
        return ''

    @password.setter
    def password(self, password):
        self.userinfo = "%s:%s" % (self.username, password)
349
def decodeurl(url):
    """Decode a URL into its component tokens.

    Returns a (scheme, host, path, user, password, parameters) tuple, where
    parameters is an ordered dict built from the ';'-separated key=value
    pairs trailing the URL.

    Raises MalformedUrl if the URL does not match the expected form or a
    trailing parameter lacks an '=' sign.
    """

    m = re.compile('(?P<type>[^:]*)://((?P<user>[^/;]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
    if not m:
        raise MalformedUrl(url)

    type = m.group('type')
    location = m.group('location')
    if not location:
        raise MalformedUrl(url)
    user = m.group('user')
    parm = m.group('parm')

    locidx = location.find('/')
    if locidx != -1 and type.lower() != 'file':
        host = location[:locidx]
        path = location[locidx:]
    elif type.lower() == 'file':
        host = ""
        path = location
    else:
        host = location
        path = "/"

    # Fix: initialise defaults up front so neither name can be unbound when
    # the credential regex below fails to match (e.g. user text starting
    # with ':'), which previously raised NameError at the return statement.
    pswd = ''
    if user:
        # Split optional 'user:password' credentials.
        m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
        if m:
            user = m.group('user')
            pswd = m.group('pswd')
    else:
        user = ''

    p = collections.OrderedDict()
    if parm:
        for s in parm.split(';'):
            if s:
                if not '=' in s:
                    raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s))
                # Fix: split on the first '=' only, so parameter values may
                # themselves contain '=' (previously a ValueError).
                s1, s2 = s.split('=', 1)
                p[s1] = s2

    return type, host, urllib.parse.unquote(path), user, pswd, p
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500395
def encodeurl(decoded):
    """Build a URL string from decoded (scheme, host, path, user, password,
    parameters) tokens, the inverse of decodeurl()."""

    type, host, path, user, pswd, p = decoded

    if not type:
        raise MissingParameterError('type', "encoded from the data %s" % str(decoded))
    parts = ['%s://' % type]
    # Credentials and host are meaningless for file:// URLs.
    if user and type != "file":
        parts.append("%s" % user)
        if pswd:
            parts.append(":%s" % pswd)
        parts.append("@")
    if host and type != "file":
        parts.append("%s" % host)
    if path:
        # Standardise path to ensure comparisons work
        while '//' in path:
            path = path.replace("//", "/")
        parts.append("%s" % urllib.parse.quote(path))
    if p:
        for parm in p:
            parts.append(";%s=%s" % (parm, p[parm]))

    return "".join(parts)
423
def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
    """Map ud.url through a single (uri_find -> uri_replace) mirror rule.

    Returns the rewritten URL string, or None when the rule does not match
    or the rewrite would reproduce the original URL unchanged.
    """
    if not ud.url or not uri_find or not uri_replace:
        logger.error("uri_replace: passed an undefined value, not replacing")
        return None
    uri_decoded = list(decodeurl(ud.url))
    uri_find_decoded = list(decodeurl(uri_find))
    uri_replace_decoded = list(decodeurl(uri_replace))
    logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
    result_decoded = ['', '', '', '', '', {}]
    # Token indices (see decodeurl): 0=scheme, 1=host, 2=path, 3=user,
    # 4=password, 5=parameter dict.
    for loc, i in enumerate(uri_find_decoded):
        result_decoded[loc] = uri_decoded[loc]
        regexp = i
        if loc == 0 and regexp and not regexp.endswith("$"):
            # Leaving the type unanchored can mean "https" matching "file"
            # can become "files", which is clearly undesirable.
            regexp += "$"
        if loc == 5:
            # Handle URL parameters.
            if i:
                # Any specified URL parameters must match.
                for k in uri_find_decoded[loc]:
                    if uri_decoded[loc][k] != uri_find_decoded[loc][k]:
                        return None
            # Overwrite any specified replacement parameters.
            for k in uri_replace_decoded[loc]:
                for l in replacements:
                    uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l])
                result_decoded[loc][k] = uri_replace_decoded[loc][k]
        elif (re.match(regexp, uri_decoded[loc])):
            if not uri_replace_decoded[loc]:
                result_decoded[loc] = ""
            else:
                for k in replacements:
                    uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k])
                #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc]))
                result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc], 1)
            if loc == 2:
                # Handle path manipulations.
                basename = None
                if uri_decoded[0] != uri_replace_decoded[0] and mirrortarball:
                    # If the source and destination url types differ, must be a mirrortarball mapping.
                    basename = os.path.basename(mirrortarball)
                    # Kill parameters, they make no sense for mirror tarballs.
                    uri_decoded[5] = {}
                elif ud.localpath and ud.method.supports_checksum(ud):
                    basename = os.path.basename(uri_decoded[loc])
                if basename and not result_decoded[loc].endswith(basename):
                    result_decoded[loc] = os.path.join(result_decoded[loc], basename)
        else:
            return None
    result = encodeurl(result_decoded)
    if result == ud.url:
        return None
    logger.debug2("For url %s returning %s" % (ud.url, result))
    return result
479
# Registry of fetch method implementations.
# NOTE(review): population of these containers is not visible in this chunk.
methods = []
urldata_cache = {}
# Snapshot of SCM head revisions taken at startup (filled in by fetcher_init).
saved_headrevs = {}
483
def fetcher_init(d):
    """
    Called to initialize the fetchers once the configuration data is known.
    Calls before this must not hit the cache.
    """

    revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
    try:
        # fetcher_init is called multiple times, so make sure we only save the
        # revs the first time it is called.
        if not bb.fetch2.saved_headrevs:
            bb.fetch2.saved_headrevs = dict(revs)
    except Exception:
        # Fix: this was a bare 'except:', which would also swallow
        # SystemExit/KeyboardInterrupt; the snapshot is best-effort, so
        # ignore ordinary errors only.
        pass

    # When to drop SCM head revisions controlled by user policy
    srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear"
    if srcrev_policy == "cache":
        logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
    elif srcrev_policy == "clear":
        logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
        revs.clear()
    else:
        raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)

    _checksum_cache.init_cache(d)

    # Give each registered fetch method a chance to initialise itself.
    for m in methods:
        if hasattr(m, "init"):
            m.init(d)
514
def fetcher_parse_save():
    """Persist the checksum cache's extra entries at parse-save time."""
    _checksum_cache.save_extras()
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500517
def fetcher_parse_done():
    """Merge and save the checksum cache once parsing has completed."""
    _checksum_cache.save_merge()
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500520
def fetcher_compare_revisions(d):
    """
    Compare the revisions in the persistent cache with the saved values from
    when bitbake was started and return true if they have changed.
    """

    current = dict(bb.persist_data.persist('BB_URI_HEADREVS', d))
    return current != bb.fetch2.saved_headrevs
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500529
def mirror_from_string(data):
    """Parse a MIRRORS-style string into a list of (find, replace) pairs."""
    tokens = (data or "").replace('\\n', ' ').split()
    # Entries must come in source/replacement pairs.
    if len(tokens) % 2 != 0:
        bb.warn('Invalid mirror data %s, should have paired members.' % data)
    return list(zip(tokens[::2], tokens[1::2]))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500536
def verify_checksum(ud, d, precomputed={}):
    """
    Verify the checksums of a downloaded file against the recipe.

    Raises a ChecksumError if one or more of the SRC_URI checksums do not
    match the downloaded file, or a NoChecksumError if BB_STRICT_CHECKSUM is
    set and no checksums are specified.

    Returns a dict of checksums that can be stored in a done stamp file and
    passed in as the precomputed parameter in a later call to avoid
    re-computing the checksums from the file. This allows verifying the
    checksums of the file against those in the recipe each time, rather than
    only after downloading.
    See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571.

    Note: precomputed is only ever read here, so the mutable default is
    harmless.
    """

    if ud.ignore_checksums or not ud.method.supports_checksum(ud):
        return {}

    def compute_checksum_info(checksum_id):
        # SRC_URI flag name used to advertise this checksum to the user.
        checksum_name = getattr(ud, "%s_name" % checksum_id)

        # Reuse a caller-supplied value when available, else hash the file.
        if checksum_id in precomputed:
            checksum_data = precomputed[checksum_id]
        else:
            checksum_data = getattr(bb.utils, "%s_file" % checksum_id)(ud.localpath)

        checksum_expected = getattr(ud, "%s_expected" % checksum_id)

        # Treat an empty expected value as "not specified".
        if checksum_expected == '':
            checksum_expected = None

        return {
            "id": checksum_id,
            "name": checksum_name,
            "data": checksum_data,
            "expected": checksum_expected
        }

    checksum_infos = [compute_checksum_info(cid) for cid in CHECKSUM_LIST]

    checksum_dict = {ci["id"]: ci["data"] for ci in checksum_infos}
    checksum_event = {"%ssum" % ci["id"]: ci["data"] for ci in checksum_infos}

    # Recipe-ready lines for the checksums we show to the user.
    for ci in checksum_infos:
        if ci["id"] in SHOWN_CHECKSUM_LIST:
            checksum_lines = ["SRC_URI[%s] = \"%s\"" % (ci["name"], ci["data"])]

    # If no checksum has been provided
    if ud.method.recommends_checksum(ud) and all(ci["expected"] is None for ci in checksum_infos):
        messages = []
        strict = d.getVar("BB_STRICT_CHECKSUM") or "0"

        # If strict checking enabled and neither sum defined, raise error
        if strict == "1":
            messages.append("No checksum specified for '%s', please add at " \
                            "least one to the recipe:" % ud.localpath)
            messages.extend(checksum_lines)
            logger.error("\n".join(messages))
            raise NoChecksumError("Missing SRC_URI checksum", ud.url)

        bb.event.fire(MissingChecksumEvent(ud.url, **checksum_event), d)

        if strict == "ignore":
            return checksum_dict

        # Log missing sums so user can more easily add them
        messages.append("Missing checksum for '%s', consider adding at " \
                        "least one to the recipe:" % ud.localpath)
        messages.extend(checksum_lines)
        logger.warning("\n".join(messages))

    # We want to alert the user if a checksum is defined in the recipe but
    # it does not match.
    messages = []
    messages.append("Checksum mismatch!")
    bad_checksum = None

    for ci in checksum_infos:
        if ci["expected"] and ci["expected"] != ci["data"]:
            messages.append("File: '%s' has %s checksum '%s' when '%s' was " \
                            "expected" % (ud.localpath, ci["id"], ci["data"], ci["expected"]))
            bad_checksum = ci["data"]

    if bad_checksum:
        messages.append("If this change is expected (e.g. you have upgraded " \
                        "to a new version without updating the checksums) " \
                        "then you can use these lines within the recipe:")
        messages.extend(checksum_lines)
        messages.append("Otherwise you should retry the download and/or " \
                        "check with upstream to determine if the file has " \
                        "become corrupted or otherwise unexpectedly modified.")
        raise ChecksumError("\n".join(messages), ud.url, bad_checksum)

    return checksum_dict
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500633
def verify_donestamp(ud, d, origud=None):
    """
    Check whether the done stamp file has the right checksums (if the fetch
    method supports them). If it doesn't, delete the done stamp and force
    a re-download.

    Returns True, if the donestamp exists and is valid, False otherwise. When
    returning False, any existing done stamps are removed.
    """
    if not ud.needdonestamp or (origud and not origud.needdonestamp):
        return True

    if not os.path.exists(ud.localpath):
        # local path does not exist
        if os.path.exists(ud.donestamp):
            # done stamp exists, but the downloaded file does not; the done
            # stamp must be incorrect, re-trigger the download
            bb.utils.remove(ud.donestamp)
        return False

    if (not ud.method.supports_checksum(ud) or
        (origud and not origud.method.supports_checksum(origud))):
        # if done stamp exists and checksums not supported; assume the local
        # file is current
        return os.path.exists(ud.donestamp)

    precomputed_checksums = {}
    # Only re-use the precomputed checksums if the donestamp is newer than the
    # file. Do not rely on the mtime of directories, though. If ud.localpath is
    # a directory, there will probably not be any checksums anyway.
    if os.path.exists(ud.donestamp) and (os.path.isdir(ud.localpath) or
            os.path.getmtime(ud.localpath) < os.path.getmtime(ud.donestamp)):
        try:
            with open(ud.donestamp, "rb") as cachefile:
                pickled = pickle.Unpickler(cachefile)
                precomputed_checksums.update(pickled.load())
        except Exception as e:
            # Avoid the warnings on the upgrade path from empty done stamp
            # files to those containing the checksums.
            if not isinstance(e, EOFError):
                # Ignore errors, they aren't fatal
                logger.warning("Couldn't load checksums from donestamp %s: %s "
                               "(msg: %s)" % (ud.donestamp, type(e).__name__,
                                              str(e)))

    try:
        checksums = verify_checksum(ud, d, precomputed_checksums)
        # If the cache file did not have the checksums, compute and store them
        # as an upgrade path from the previous done stamp file format.
        if checksums != precomputed_checksums:
            with open(ud.donestamp, "wb") as cachefile:
                p = pickle.Pickler(cachefile, 2)
                p.dump(checksums)
        return True
    except ChecksumError as e:
        # Checksums failed to verify, trigger re-download and remove the
        # incorrect stamp file.
        logger.warning("Checksum mismatch for local file %s\n"
                       "Cleaning and trying again." % ud.localpath)
        if os.path.exists(ud.localpath):
            rename_bad_checksum(ud, e.checksum)
        bb.utils.remove(ud.donestamp)
        return False
697
698
def update_stamp(ud, d):
    """
    donestamp is a file stamp indicating the whole fetch is done.

    Update (touch or create) the stamp after verifying the checksums; on a
    checksum failure the stamp is removed and the error re-raised so the
    download will be retried.
    """
    if not ud.needdonestamp:
        return

    if os.path.exists(ud.donestamp):
        # Touch the done stamp file to show active use of the download
        try:
            os.utime(ud.donestamp, None)
        except Exception:
            # Fix: this was a bare 'except:', which would also swallow
            # SystemExit/KeyboardInterrupt; errors here still aren't fatal.
            pass
    else:
        try:
            checksums = verify_checksum(ud, d)
            # Store the checksums for later re-verification against the recipe
            with open(ud.donestamp, "wb") as cachefile:
                p = pickle.Pickler(cachefile, 2)
                p.dump(checksums)
        except ChecksumError as e:
            # Checksums failed to verify, trigger re-download and remove the
            # incorrect stamp file.
            logger.warning("Checksum mismatch for local file %s\n"
                           "Cleaning and trying again." % ud.localpath)
            if os.path.exists(ud.localpath):
                rename_bad_checksum(ud, e.checksum)
            bb.utils.remove(ud.donestamp)
            raise
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500730
def subprocess_setup():
    """Restore default SIGPIPE handling (presumably for use as a subprocess
    preexec_fn -- confirm at call sites).

    Python installs a SIGPIPE handler by default, which is usually not what
    non-Python subprocesses expect; SIGPIPE errors are known issues with
    gzip/bash.
    """
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
736
def get_autorev(d):
    """Return the AUTOINC marker, disabling SRCREV caching unless the cache
    policy explicitly asks for it."""
    if d.getVar('BB_SRCREV_POLICY') != "cache":
        d.setVar('BB_DONT_CACHE', '1')
    return "AUTOINC"
742
def get_srcrev(d, method_name='sortable_revision'):
    """
    Return the revision string, usually for use in the version string (PV) of the current package
    Most packages usually only have one SCM so we just pass on the call.
    In the multi SCM case, we build a value based on SRCREV_FORMAT which must
    have been set.

    The idea here is that we put the string "AUTOINC+" into return value if the revisions are not
    incremental, other code is then responsible for turning that into an increasing value (if needed)

    A method_name can be supplied to retrieve an alternatively formatted revision from a fetcher, if
    that fetcher provides a method with the given name and the same signature as sortable_revision.

    Raises FetchError if no SCM is found in SRC_URI, if SRCREV_FORMAT is
    missing in the multi-SCM case, or on recursive SRC_URI references.
    """

    # Guard against recursion: expanding SRC_URI below may itself trigger
    # another get_srcrev() call through variable references.
    recursion = d.getVar("__BBINSRCREV")
    if recursion:
        raise FetchError("There are recursive references in fetcher variables, likely through SRC_URI")
    d.setVar("__BBINSRCREV", True)

    scms = []
    fetcher = Fetch(d.getVar('SRC_URI').split(), d)
    urldata = fetcher.ud
    for u in urldata:
        if urldata[u].method.supports_srcrev():
            scms.append(u)

    if len(scms) == 0:
        raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")

    # Common case: a single SCM with a single name — return its revision
    # directly (truncated to 10 characters to keep PV manageable).
    if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
        autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].names[0])
        if len(rev) > 10:
            rev = rev[:10]
        d.delVar("__BBINSRCREV")
        if autoinc:
            return "AUTOINC+" + rev
        return rev

    #
    # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
    #
    format = d.getVar('SRCREV_FORMAT')
    if not format:
        raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.\n"\
                         "The SCMs are:\n%s" % '\n'.join(scms))

    name_to_rev = {}
    seenautoinc = False
    for scm in scms:
        ud = urldata[scm]
        for name in ud.names:
            autoinc, rev = getattr(ud.method, method_name)(ud, d, name)
            seenautoinc = seenautoinc or autoinc
            if len(rev) > 10:
                rev = rev[:10]
            name_to_rev[name] = rev
    # Replace names by revisions in the SRCREV_FORMAT string. The approach used
    # here can handle names being prefixes of other names and names appearing
    # as substrings in revisions (in which case the name should not be
    # expanded). The '|' regular expression operator tries matches from left to
    # right, so we need to sort the names with the longest ones first.
    names_descending_len = sorted(name_to_rev, key=len, reverse=True)
    name_to_rev_re = "|".join(re.escape(name) for name in names_descending_len)
    format = re.sub(name_to_rev_re, lambda match: name_to_rev[match.group(0)], format)

    if seenautoinc:
        format = "AUTOINC+" + format

    d.delVar("__BBINSRCREV")
    return format
813
def localpath(url, d):
    """Convenience wrapper: resolve the local download path of a single url."""
    return bb.fetch2.Fetch([url], d).localpath(url)
817
# Environment variables passed through to fetch commands run by
# runfetchcmd(). PATH must be exported as the fetch binary could live in
# metadata-provided paths rather than host-provided ones; the rest cover
# proxies, ssh/git agents and cloud credentials.
FETCH_EXPORT_VARS = ['HOME', 'PATH',
                     'HTTP_PROXY', 'http_proxy',
                     'HTTPS_PROXY', 'https_proxy',
                     'FTP_PROXY', 'ftp_proxy',
                     'FTPS_PROXY', 'ftps_proxy',
                     'NO_PROXY', 'no_proxy',
                     'ALL_PROXY', 'all_proxy',
                     'GIT_PROXY_COMMAND',
                     'GIT_SSH',
                     'GIT_SSL_CAINFO',
                     'GIT_SMART_HTTP',
                     'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
                     'SOCKS5_USER', 'SOCKS5_PASSWD',
                     'DBUS_SESSION_BUS_ADDRESS',
                     'P4CONFIG',
                     'SSL_CERT_FILE',
                     'AWS_PROFILE',
                     'AWS_ACCESS_KEY_ID',
                     'AWS_SECRET_ACCESS_KEY',
                     'AWS_DEFAULT_REGION']
841
def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
    """
    Run cmd returning the command output
    Raise an error if interrupted or cmd fails
    Optionally echo command output to stdout
    Optionally remove the files/directories listed in cleanup upon failure

    cmd is run through a shell, with the FETCH_EXPORT_VARS environment
    variables (taken from the datastore or the original environment)
    prepended as 'export' statements. Raises FetchError on any failure.
    """

    exportvars = FETCH_EXPORT_VARS

    if not cleanup:
        cleanup = []

    # If PATH contains WORKDIR which contains PV-PR which contains SRCPV we
    # can end up in circular recursion here so give the option of breaking it
    # in a data store copy.
    try:
        d.getVar("PV")
        d.getVar("PR")
    except bb.data_smart.ExpansionError:
        d = bb.data.createCopy(d)
        d.setVar("PV", "fetcheravoidrecurse")
        d.setVar("PR", "fetcheravoidrecurse")

    # Datastore values win; fall back to the environment BitBake started with.
    origenv = d.getVar("BB_ORIGENV", False)
    for var in exportvars:
        val = d.getVar(var) or (origenv and origenv.getVar(var))
        if val:
            cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)

    # Disable pseudo as it may affect ssh, potentially causing it to hang.
    cmd = 'export PSEUDO_DISABLED=1; ' + cmd

    if workdir:
        logger.debug("Running '%s' in %s" % (cmd, workdir))
    else:
        logger.debug("Running %s", cmd)

    success = False
    error_message = ""

    try:
        (output, errors) = bb.process.run(cmd, log=log, shell=True, stderr=subprocess.PIPE, cwd=workdir)
        success = True
    except bb.process.NotFoundError as e:
        error_message = "Fetch command %s not found" % (e.command)
    except bb.process.ExecutionError as e:
        if e.stdout:
            output = "output:\n%s\n%s" % (e.stdout, e.stderr)
        elif e.stderr:
            output = "output:\n%s" % e.stderr
        else:
            output = "no output"
        error_message = "Fetch command %s failed with exit code %s, %s" % (e.command, e.exitcode, output)
    except bb.process.CmdError as e:
        error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg)
    if not success:
        # Remove any partial results before reporting the failure.
        for f in cleanup:
            try:
                bb.utils.remove(f, True)
            except OSError:
                pass

        raise FetchError(error_message)

    return output
908
def check_network_access(d, info, url):
    """
    log remote network access, and error if BB_NO_NETWORK is set or the given
    URI is untrusted
    """
    if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")):
        raise NetworkAccess(url, info)
    if not trusted_network(d, url):
        raise UntrustedUrl(url, info)
    # Access permitted; keep an audit trail of the exact command used.
    logger.debug("Fetcher accessed the network with the command %s" % info)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500920
def build_mirroruris(origud, mirrors, ld):
    """
    Expand the (find, replace) mirror list into concrete candidate URIs
    for origud, returning parallel lists (uris, uds) of URI strings and
    their FetchData objects. Mirrors of mirrors are followed recursively,
    with each consumed mirror line removed to avoid infinite loops.
    """
    uris = []
    uds = []

    # Substitution tokens available to uri_replace() patterns.
    replacements = {}
    replacements["TYPE"] = origud.type
    replacements["HOST"] = origud.host
    replacements["PATH"] = origud.path
    replacements["BASENAME"] = origud.path.split("/")[-1]
    replacements["MIRRORNAME"] = origud.host.replace(':','.') + origud.path.replace('/', '.').replace('*', '.')

    def adduri(ud, uris, uds, mirrors, tarballs):
        # Try every mirror line against ud, once per candidate tarball name.
        for line in mirrors:
            try:
                (find, replace) = line
            except ValueError:
                continue

            for tarball in tarballs:
                newuri = uri_replace(ud, find, replace, replacements, ld, tarball)
                if not newuri or newuri in uris or newuri == origud.url:
                    continue

                if not trusted_network(ld, newuri):
                    logger.debug("Mirror %s not in the list of trusted networks, skipping" % (newuri))
                    continue

                # Create a local copy of the mirrors minus the current line
                # this will prevent us from recursively processing the same line
                # as well as indirect recursion A -> B -> C -> A
                localmirrors = list(mirrors)
                localmirrors.remove(line)

                try:
                    newud = FetchData(newuri, ld)
                    newud.setup_localpath(ld)
                except bb.fetch2.BBFetchException as e:
                    logger.debug("Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
                    logger.debug(str(e))
                    try:
                        # setup_localpath of file:// urls may fail, we should still see
                        # if mirrors of the url exist
                        adduri(newud, uris, uds, localmirrors, tarballs)
                    except UnboundLocalError:
                        pass
                    continue
                uris.append(newuri)
                uds.append(newud)

                # Recurse: a mirror URI may itself have mirrors.
                adduri(newud, uris, uds, localmirrors, tarballs)

    # [None] makes the tarball loop run once when no mirror tarballs exist.
    adduri(origud, uris, uds, mirrors, origud.mirrortarballs or [None])

    return uris, uds
975
def rename_bad_checksum(ud, suffix):
    """
    Renames files to have suffix from parameter
    """

    # Nothing was downloaded, so there is nothing to move aside.
    if ud.localpath is None:
        return

    renamed = "%s_bad-checksum_%s" % (ud.localpath, suffix)
    bb.warn("Renaming %s to %s" % (ud.localpath, renamed))
    moved = bb.utils.movefile(ud.localpath, renamed)
    if not moved:
        bb.warn("Renaming %s to %s failed, grep movefile in log.do_fetch to see why" % (ud.localpath, renamed))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500988
989
def try_mirror_url(fetch, origud, ud, ld, check = False):
    """
    Try to fetch (or, with check=True, just probe) one mirror candidate.

    Returns a value (or None) when fetching is finished; returns False to
    tell the caller to try the next mirror URL. A per-mirror lock is held
    for the duration when the mirror uses a different lockfile than the
    original URL.
    """
    # Return of None or a value means we're finished
    # False means try another url

    if ud.lockfile and ud.lockfile != origud.lockfile:
        lf = bb.utils.lockfile(ud.lockfile)

    try:
        if check:
            found = ud.method.checkstatus(fetch, ud, ld)
            if found:
                return found
            return False

        if not verify_donestamp(ud, ld, origud) or ud.method.need_update(ud, ld):
            ud.method.download(ud, ld)
            if hasattr(ud.method,"build_mirror_data"):
                ud.method.build_mirror_data(ud, ld)

        if not ud.localpath or not os.path.exists(ud.localpath):
            return False

        if ud.localpath == origud.localpath:
            return ud.localpath

        # We may be obtaining a mirror tarball which needs further processing by the real fetcher
        # If that tarball is a local file:// we need to provide a symlink to it
        dldir = ld.getVar("DL_DIR")

        if origud.mirrortarballs and os.path.basename(ud.localpath) in origud.mirrortarballs and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
            # Create donestamp in old format to avoid triggering a re-download
            if ud.donestamp:
                bb.utils.mkdirhier(os.path.dirname(ud.donestamp))
                open(ud.donestamp, 'w').close()
            dest = os.path.join(dldir, os.path.basename(ud.localpath))
            if not os.path.exists(dest):
                # In case this is executing without any file locks held (as is
                # the case for file:// URLs), two tasks may end up here at the
                # same time, in which case we do not want the second task to
                # fail when the link has already been created by the first task.
                try:
                    os.symlink(ud.localpath, dest)
                except FileExistsError:
                    pass
            # Hand over to the original fetcher to unpack the mirror tarball.
            if not verify_donestamp(origud, ld) or origud.method.need_update(origud, ld):
                origud.method.download(origud, ld)
                if hasattr(origud.method, "build_mirror_data"):
                    origud.method.build_mirror_data(origud, ld)
            return origud.localpath
        # Otherwise the result is a local file:// and we symlink to it
        ensure_symlink(ud.localpath, origud.localpath)
        update_stamp(origud, ld)
        return ud.localpath

    except bb.fetch2.NetworkAccess:
        raise

    except IOError as e:
        # Stale NFS handles are transient; report and move to the next mirror.
        if e.errno in [errno.ESTALE]:
            logger.warning("Stale Error Observed %s." % ud.url)
            return False
        raise

    except bb.fetch2.BBFetchException as e:
        if isinstance(e, ChecksumError):
            logger.warning("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (ud.url, origud.url))
            logger.warning(str(e))
            if os.path.exists(ud.localpath):
                rename_bad_checksum(ud, e.checksum)
        elif isinstance(e, NoChecksumError):
            raise
        else:
            logger.debug("Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
            logger.debug(str(e))
        # clean() may fail if localpath was never set up; ignore that case.
        try:
            ud.method.clean(ud, ld)
        except UnboundLocalError:
            pass
        return False
    finally:
        if ud.lockfile and ud.lockfile != origud.lockfile:
            bb.utils.unlockfile(lf)
1072
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001073
def ensure_symlink(target, link_name):
    """Make link_name a symlink to target unless something usable already exists there.

    A dangling symlink at link_name is removed first; an existing (resolvable)
    path is left untouched.
    """
    if os.path.exists(link_name):
        return
    # exists() follows symlinks, so reaching here with islink() true means
    # the link is broken; clear it before recreating.
    if os.path.islink(link_name):
        os.unlink(link_name)

    # In case this is executing without any file locks held (as is
    # the case for file:// URLs), two tasks may end up here at the
    # same time, in which case we do not want the second task to
    # fail when the link has already been created by the first task.
    try:
        os.symlink(target, link_name)
    except FileExistsError:
        pass
1088
1089
def try_mirrors(fetch, d, origud, mirrors, check = False):
    """
    Try to use a mirrored version of the sources.
    This method will be automatically called before the fetchers go.

    d Is a bb.data instance
    uri is the original uri we're trying to download
    mirrors is the list of mirrors we're going to try
    """
    # Work on a copy of the datastore so mirror handling cannot leak
    # variable changes back to the caller.
    ld = d.createCopy()

    candidate_uris, candidate_uds = build_mirroruris(origud, mirrors, ld)

    for mirror_ud in candidate_uds:
        result = try_mirror_url(fetch, origud, mirror_ud, ld, check)
        if result:
            return result
    return None
1108
def trusted_network(d, url):
    """
    Use a trusted url during download if networking is enabled and
    BB_ALLOWED_NETWORKS is set globally or for a specific recipe.
    Note: modifies SRC_URI & mirrors.
    """
    # With networking disabled, nothing will be fetched anyway, so there
    # is no untrusted access to reject.
    if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")):
        return True

    # A per-recipe flag on BB_ALLOWED_NETWORKS overrides the global value.
    pkgname = d.expand(d.getVar('PN', False))
    trusted_hosts = None
    if pkgname:
        trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False)

    if not trusted_hosts:
        trusted_hosts = d.getVar('BB_ALLOWED_NETWORKS')

    # Not enabled.
    if not trusted_hosts:
        return True

    scheme, network, path, user, passwd, param = decodeurl(url)

    if not network:
        return True

    # Drop any :port suffix and normalise case before matching.
    hostname = network.split(':')[0].lower()

    for allowed in trusted_hosts.split(" "):
        allowed = allowed.lower()
        if allowed == hostname:
            return True
        # "*.example.com" matches any subdomain, including the bare domain.
        if allowed.startswith("*.") and ("." + hostname).endswith(allowed[1:]):
            return True

    return False
1146
def srcrev_internal_helper(ud, d, name):
    """
    Return:
        a) a source revision if specified
        b) latest revision if SRCREV="AUTOINC"
        c) None if not specified

    Raises FetchError when both ;rev= and ;tag= are given, when they
    conflict with SRCREV, or when no valid SRCREV can be determined.
    """

    srcrev = None
    pn = d.getVar("PN")
    # Candidate variable names, most specific first: per-name per-recipe,
    # per-name, per-recipe, then the plain SRCREV fallback.
    attempts = []
    if name != '' and pn:
        attempts.append("SRCREV_%s:pn-%s" % (name, pn))
    if name != '':
        attempts.append("SRCREV_%s" % name)
    if pn:
        attempts.append("SRCREV:pn-%s" % pn)
    attempts.append("SRCREV")

    for a in attempts:
        srcrev = d.getVar(a)
        if srcrev and srcrev != "INVALID":
            break

    if 'rev' in ud.parm and 'tag' in ud.parm:
        raise FetchError("Please specify a ;rev= parameter or a ;tag= parameter in the url %s but not both." % (ud.url))

    # A revision given directly in the URL wins when SRCREV is unset, but
    # a set SRCREV must agree with it.
    if 'rev' in ud.parm or 'tag' in ud.parm:
        if 'rev' in ud.parm:
            parmrev = ud.parm['rev']
        else:
            parmrev = ud.parm['tag']
        if srcrev == "INVALID" or not srcrev:
            return parmrev
        if srcrev != parmrev:
            raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please specify one valid value" % (srcrev, parmrev))
        return parmrev

    if srcrev == "INVALID" or not srcrev:
        raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url)
    # "AUTOINC" delegates to the fetcher to look up the latest upstream revision.
    if srcrev == "AUTOINC":
        srcrev = ud.method.latest_revision(ud, d, name)

    return srcrev
1191
def get_checksum_file_list(d):
    """ Get a list of files checksum in SRC_URI

    Returns the resolved local paths of all local file entries in
    SRC_URI as a space-separated string, each entry suffixed with
    ":True" or ":False" according to whether the file exists.
    """
    fetch = Fetch([], d, cache = False, localonly = True)

    dl_dir = d.getVar('DL_DIR')
    filelist = []
    for u in fetch.urls:
        ud = fetch.ud[u]

        if ud and isinstance(ud.method, local.Local):
            paths = ud.method.localpaths(ud, d)
            # (Fix: removed unused local 'pth = ud.decodedurl' which served
            # no purpose in this loop.)
            for f in paths:
                if f.startswith(dl_dir):
                    # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
                    if os.path.exists(f):
                        bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN'), os.path.basename(f)))
                    else:
                        bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN'), os.path.basename(f)))
                filelist.append(f + ":" + str(os.path.exists(f)))

    return " ".join(filelist)
1218
def get_file_checksums(filelist, pn, localdirsexclude):
    """Get a list of the checksums for a list of local files

    Returns the checksums for a list of local files, caching the results as
    it proceeds

    """
    # Delegates to the module-level FileChecksumCache instance so results
    # persist across calls within this process.
    return _checksum_cache.get_checksums(filelist, pn, localdirsexclude)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001227
1228
class FetchData(object):
    """
    A class which represents the fetcher state for a given URI.

    Decodes the URI, selects the fetch method that supports it, records
    expected checksums, and determines the donestamp/lockfile paths
    under DL_DIR.
    """
    def __init__(self, url, d, localonly = False):
        # localpath is the location of a downloaded result. If not set, the file is local.
        self.donestamp = None
        self.needdonestamp = True
        self.localfile = ""
        self.localpath = None
        self.lockfile = None
        self.mirrortarballs = []
        self.basename = None
        self.basepath = None
        (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(d.expand(url))
        self.date = self.getSRCDate(d)
        self.url = url
        # URL parameters may also carry credentials; the decoded URL wins.
        if not self.user and "user" in self.parm:
            self.user = self.parm["user"]
        if not self.pswd and "pswd" in self.parm:
            self.pswd = self.parm["pswd"]
        self.setup = False

        def configure_checksum(checksum_id):
            # Derive the parameter name, e.g. "sha256sum" or "<name>.sha256sum"
            # for URLs with a ;name= parameter.
            if "name" in self.parm:
                checksum_name = "%s.%ssum" % (self.parm["name"], checksum_id)
            else:
                checksum_name = "%ssum" % checksum_id

            setattr(self, "%s_name" % checksum_id, checksum_name)

            # Expected value: URL parameter first; only network download
            # types fall back to SRC_URI varflags.
            if checksum_name in self.parm:
                checksum_expected = self.parm[checksum_name]
            elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3", "az"]:
                checksum_expected = None
            else:
                checksum_expected = d.getVarFlag("SRC_URI", checksum_name)

            setattr(self, "%s_expected" % checksum_id, checksum_expected)

        for checksum_id in CHECKSUM_LIST:
            configure_checksum(checksum_id)

        self.ignore_checksums = False

        self.names = self.parm.get("name",'default').split(',')

        # First fetch method claiming support for this URL wins.
        self.method = None
        for m in methods:
            if m.supports(self, d):
                self.method = m
                break

        if not self.method:
            raise NoMethodError(url)

        if localonly and not isinstance(self.method, local.Local):
            raise NonLocalMethod()

        # Accept the legacy ;proto= spelling but warn about it.
        if self.parm.get("proto", None) and "protocol" not in self.parm:
            logger.warning('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN'))
            self.parm["protocol"] = self.parm.get("proto", None)

        if hasattr(self.method, "urldata_init"):
            self.method.urldata_init(self, d)

        if "localpath" in self.parm:
            # if user sets localpath for file, use it instead.
            self.localpath = self.parm["localpath"]
            self.basename = os.path.basename(self.localpath)
        elif self.localfile:
            self.localpath = self.method.localpath(self, d)

        dldir = d.getVar("DL_DIR")

        if not self.needdonestamp:
            return

        # Note: .done and .lock files should always be in DL_DIR whereas localpath may not be.
        if self.localpath and self.localpath.startswith(dldir):
            basepath = self.localpath
        elif self.localpath:
            basepath = dldir + os.sep + os.path.basename(self.localpath)
        elif self.basepath or self.basename:
            basepath = dldir + os.sep + (self.basepath or self.basename)
        else:
            bb.fatal("Can't determine lock path for url %s" % url)

        self.donestamp = basepath + '.done'
        self.lockfile = basepath + '.lock'

    def setup_revisions(self, d):
        # Resolve one revision per named SCM in the URL.
        self.revisions = {}
        for name in self.names:
            self.revisions[name] = srcrev_internal_helper(self, d, name)

        # add compatibility code for non name specified case
        if len(self.names) == 1:
            self.revision = self.revisions[self.names[0]]

    def setup_localpath(self, d):
        # Lazily resolve localpath via the fetch method if not already set.
        if not self.localpath:
            self.localpath = self.method.localpath(self, d)

    def getSRCDate(self, d):
        """
        Return the SRC Date for the component

        d the bb.data module
        """
        # A ;srcdate= URL parameter overrides any datastore value.
        if "srcdate" in self.parm:
            return self.parm['srcdate']

        pn = d.getVar("PN")

        if pn:
            return d.getVar("SRCDATE_%s" % pn) or d.getVar("SRCDATE") or d.getVar("DATE")

        return d.getVar("SRCDATE") or d.getVar("DATE")
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001348
1349class FetchMethod(object):
1350 """Base class for 'fetch'ing data"""
1351
    def __init__(self, urls=None):
        # NOTE(review): the 'urls' argument is accepted but ignored; the
        # list is always reset here. Assignment goes through the 'urls'
        # property (setUrls) defined below — confirm no caller relies on
        # passing urls to the constructor before changing this.
        self.urls = []
1354
    def supports(self, urldata, d):
        """
        Check to see if this fetch class supports a given url.
        Base implementation returns 0 (falsy); subclasses override to
        claim URL types they handle.
        """
        return 0
1360
1361 def localpath(self, urldata, d):
1362 """
1363 Return the local filename of a given url assuming a successful fetch.
1364 Can also setup variables in urldata for use in go (saving code duplication
1365 and duplicate code execution)
1366 """
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001367 return os.path.join(d.getVar("DL_DIR"), urldata.localfile)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001368
1369 def supports_checksum(self, urldata):
1370 """
1371 Is localpath something that can be represented by a checksum?
1372 """
1373
1374 # We cannot compute checksums for directories
Andrew Geissler82c905d2020-04-13 13:39:40 -05001375 if os.path.isdir(urldata.localpath):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001376 return False
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001377 return True
1378
    def recommends_checksum(self, urldata):
        """
        Is this a backend for which checksumming is recommended (i.e.
        should warnings be displayed if there is no checksum)?
        Base implementation says no; download-type fetchers override.
        """
        return False
1385
    def verify_donestamp(self, ud, d):
        """
        Verify the donestamp file.
        Thin hook delegating to the module-level verify_donestamp();
        fetchers can override for custom stamp handling.
        """
        return verify_donestamp(ud, d)
1391
    def update_donestamp(self, ud, d):
        """
        Update the donestamp file.
        Thin hook delegating to the module-level update_stamp();
        fetchers can override for custom stamp handling.
        """
        update_stamp(ud, d)
1397
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001398 def _strip_leading_slashes(self, relpath):
1399 """
1400 Remove leading slash as os.path.join can't cope
1401 """
1402 while os.path.isabs(relpath):
1403 relpath = relpath[1:]
1404 return relpath
1405
    def setUrls(self, urls):
        # Backing store is name-mangled (_FetchMethod__urls) so subclasses
        # cannot accidentally shadow it.
        self.__urls = urls

    def getUrls(self):
        return self.__urls

    # 'urls' reads and writes go through the accessors above.
    urls = property(getUrls, setUrls, None, "Urls property")
1413
1414 def need_update(self, ud, d):
1415 """
1416 Force a fetch, even if localpath exists?
1417 """
1418 if os.path.exists(ud.localpath):
1419 return False
1420 return True
1421
    def supports_srcrev(self):
        """
        The fetcher supports auto source revisions (SRCREV).
        Base implementation says no; SCM fetchers (git, svn, ...) override.
        """
        return False
1427
    def download(self, urldata, d):
        """
        Fetch urls
        Assumes localpath was called first
        Base implementation always raises NoMethodError; every concrete
        fetcher must override this.
        """
        raise NoMethodError(urldata.url)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001434
    def unpack(self, urldata, rootdir, data):
        """
        Unpack urldata.localpath into rootdir.

        The extraction command is chosen from the file extension; the URL
        parameters 'unpack', 'dos', 'subdir' and (for rpm/srpm) 'extract'
        modify the behaviour.  Raises UnpackError when the external
        command fails or the archive layout is unexpected.
        """
        iterate = False
        file = urldata.localpath

        try:
            unpack = bb.utils.to_boolean(urldata.parm.get('unpack'), True)
        except ValueError as exc:
            bb.fatal("Invalid value for 'unpack' parameter for %s: %s" %
                     (file, urldata.parm.get('unpack')))

        # Single-file compressions decompress into rootdir under the basename
        # minus the compression suffix; everything else unpacks in place.
        base, ext = os.path.splitext(file)
        if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz']:
            efile = os.path.join(rootdir, os.path.basename(base))
        else:
            efile = file
        cmd = None

        if unpack:
            # Map file extension to an external shell command.
            if file.endswith('.tar'):
                cmd = 'tar x --no-same-owner -f %s' % file
            elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
                cmd = 'tar xz --no-same-owner -f %s' % file
            elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
                cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
                cmd = 'gzip -dc %s > %s' % (file, efile)
            elif file.endswith('.bz2'):
                cmd = 'bzip2 -dc %s > %s' % (file, efile)
            elif file.endswith('.txz') or file.endswith('.tar.xz'):
                cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.xz'):
                cmd = 'xz -dc %s > %s' % (file, efile)
            elif file.endswith('.tar.lz'):
                cmd = 'lzip -dc %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.lz'):
                cmd = 'lzip -dc %s > %s' % (file, efile)
            elif file.endswith('.tar.7z'):
                cmd = '7z x -so %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.7z'):
                cmd = '7za x -y %s 1>/dev/null' % file
            elif file.endswith('.tzst') or file.endswith('.tar.zst'):
                cmd = 'zstd --decompress --stdout %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.zst'):
                cmd = 'zstd --decompress --stdout %s > %s' % (file, efile)
            elif file.endswith('.zip') or file.endswith('.jar'):
                try:
                    dos = bb.utils.to_boolean(urldata.parm.get('dos'), False)
                except ValueError as exc:
                    bb.fatal("Invalid value for 'dos' parameter for %s: %s" %
                             (file, urldata.parm.get('dos')))
                cmd = 'unzip -q -o'
                if dos:
                    # -a converts DOS line endings while extracting
                    cmd = '%s -a' % cmd
                cmd = "%s '%s'" % (cmd, file)
            elif file.endswith('.rpm') or file.endswith('.srpm'):
                if 'extract' in urldata.parm:
                    # Pull only the named file from the cpio payload, then
                    # recurse (iterate) below to unpack that file itself.
                    unpack_file = urldata.parm.get('extract')
                    cmd = 'rpm2cpio.sh %s | cpio -id %s' % (file, unpack_file)
                    iterate = True
                    iterate_file = unpack_file
                else:
                    cmd = 'rpm2cpio.sh %s | cpio -id' % (file)
            elif file.endswith('.deb') or file.endswith('.ipk'):
                # List the ar archive to locate the data.tar.* member.
                output = subprocess.check_output(['ar', '-t', file], preexec_fn=subprocess_setup)
                datafile = None
                if output:
                    for line in output.decode().splitlines():
                        if line.startswith('data.tar.'):
                            datafile = line
                            break
                    else:
                        raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar.* file", urldata.url)
                else:
                    raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url)
                cmd = 'ar x %s %s && tar --no-same-owner -xpf %s && rm %s' % (file, datafile, datafile, datafile)

        # If 'subdir' param exists, create a dir and use it as destination for unpack cmd
        if 'subdir' in urldata.parm:
            subdir = urldata.parm.get('subdir')
            if os.path.isabs(subdir):
                # Absolute subdirs must still live beneath the unpack root
                if not os.path.realpath(subdir).startswith(os.path.realpath(rootdir)):
                    raise UnpackError("subdir argument isn't a subdirectory of unpack root %s" % rootdir, urldata.url)
                unpackdir = subdir
            else:
                unpackdir = os.path.join(rootdir, subdir)
            bb.utils.mkdirhier(unpackdir)
        else:
            unpackdir = rootdir

        if not unpack or not cmd:
            # If file == dest, then avoid any copies, as we already put the file into dest!
            dest = os.path.join(unpackdir, os.path.basename(file))
            if file != dest and not (os.path.exists(dest) and os.path.samefile(file, dest)):
                destdir = '.'
                # For file:// entries all intermediate dirs in path must be created at destination
                if urldata.type == "file":
                    # Trailing '/' does a copying to wrong place
                    urlpath = urldata.path.rstrip('/')
                    # Want files places relative to cwd so no leading '/'
                    urlpath = urlpath.lstrip('/')
                    if urlpath.find("/") != -1:
                        destdir = urlpath.rsplit("/", 1)[0] + '/'
                        bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir))
                cmd = 'cp -fpPRH "%s" "%s"' % (file, destdir)

        if not cmd:
            return

        # Run the command through a shell, from inside unpackdir, with the
        # datastore's PATH prepended when set.
        path = data.getVar('PATH')
        if path:
            cmd = "PATH=\"%s\" %s" % (path, cmd)
        bb.note("Unpacking %s to %s/" % (file, unpackdir))
        ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True, cwd=unpackdir)

        if ret != 0:
            raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url)

        if iterate is True:
            # Second pass: unpack the file extracted from the rpm payload.
            iterate_urldata = urldata
            iterate_urldata.localpath = "%s/%s" % (rootdir, iterate_file)
            self.unpack(urldata, rootdir, data)

        return
1558
1559 def clean(self, urldata, d):
1560 """
1561 Clean any existing full or partial download
1562 """
1563 bb.utils.remove(urldata.localpath)
1564
1565 def try_premirror(self, urldata, d):
1566 """
1567 Should premirrors be used?
1568 """
1569 return True
1570
Andrew Geissler82c905d2020-04-13 13:39:40 -05001571 def try_mirrors(self, fetch, urldata, d, mirrors, check=False):
1572 """
1573 Try to use a mirror
1574 """
1575 return bool(try_mirrors(fetch, d, urldata, mirrors, check))
1576
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001577 def checkstatus(self, fetch, urldata, d):
1578 """
1579 Check the status of a URL
1580 Assumes localpath was called first
1581 """
Brad Bishop19323692019-04-05 15:28:33 -04001582 logger.info("URL %s could not be checked for status since no method exists.", urldata.url)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001583 return True
1584
1585 def latest_revision(self, ud, d, name):
1586 """
1587 Look in the cache for the latest revision, if not present ask the SCM.
1588 """
1589 if not hasattr(self, "_latest_revision"):
Brad Bishop19323692019-04-05 15:28:33 -04001590 raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001591
1592 revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
1593 key = self.generate_revision_key(ud, d, name)
1594 try:
1595 return revs[key]
1596 except KeyError:
1597 revs[key] = rev = self._latest_revision(ud, d, name)
1598 return rev
1599
1600 def sortable_revision(self, ud, d, name):
1601 latest_rev = self._build_revision(ud, d, name)
1602 return True, str(latest_rev)
1603
1604 def generate_revision_key(self, ud, d, name):
Andrew Geissler82c905d2020-04-13 13:39:40 -05001605 return self._revision_key(ud, d, name)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001606
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001607 def latest_versionstring(self, ud, d):
1608 """
1609 Compute the latest release name like "x.y.x" in "x.y.x+gitHASH"
1610 by searching through the tags output of ls-remote, comparing
1611 versions and returning the highest match as a (version, revision) pair.
1612 """
1613 return ('', '')
1614
Andrew Geissler82c905d2020-04-13 13:39:40 -05001615 def done(self, ud, d):
1616 """
1617 Is the download done ?
1618 """
1619 if os.path.exists(ud.localpath):
1620 return True
Andrew Geissler82c905d2020-04-13 13:39:40 -05001621 return False
1622
Andrew Geissler4ed12e12020-06-05 18:00:41 -05001623 def implicit_urldata(self, ud, d):
1624 """
1625 Get a list of FetchData objects for any implicit URLs that will also
1626 be downloaded when we fetch the given URL.
1627 """
1628 return []
1629
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001630class Fetch(object):
1631 def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None):
1632 if localonly and cache:
1633 raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")
1634
1635 if len(urls) == 0:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001636 urls = d.getVar("SRC_URI").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001637 self.urls = urls
1638 self.d = d
1639 self.ud = {}
1640 self.connection_cache = connection_cache
1641
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001642 fn = d.getVar('FILE')
1643 mc = d.getVar('__BBMULTICONFIG') or ""
Andrew Geissler82c905d2020-04-13 13:39:40 -05001644 key = None
1645 if cache and fn:
1646 key = mc + fn + str(id(d))
1647 if key in urldata_cache:
1648 self.ud = urldata_cache[key]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001649
1650 for url in urls:
1651 if url not in self.ud:
1652 try:
1653 self.ud[url] = FetchData(url, d, localonly)
1654 except NonLocalMethod:
1655 if localonly:
1656 self.ud[url] = None
1657 pass
1658
Andrew Geissler82c905d2020-04-13 13:39:40 -05001659 if key:
1660 urldata_cache[key] = self.ud
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001661
1662 def localpath(self, url):
1663 if url not in self.urls:
1664 self.ud[url] = FetchData(url, self.d)
1665
1666 self.ud[url].setup_localpath(self.d)
1667 return self.d.expand(self.ud[url].localpath)
1668
1669 def localpaths(self):
1670 """
1671 Return a list of the local filenames, assuming successful fetch
1672 """
1673 local = []
1674
1675 for u in self.urls:
1676 ud = self.ud[u]
1677 ud.setup_localpath(self.d)
1678 local.append(ud.localpath)
1679
1680 return local
1681
1682 def download(self, urls=None):
1683 """
1684 Fetch all urls
1685 """
1686 if not urls:
1687 urls = self.urls
1688
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001689 network = self.d.getVar("BB_NO_NETWORK")
Brad Bishop19323692019-04-05 15:28:33 -04001690 premirroronly = bb.utils.to_boolean(self.d.getVar("BB_FETCH_PREMIRRORONLY"))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001691
1692 for u in urls:
1693 ud = self.ud[u]
1694 ud.setup_localpath(self.d)
1695 m = ud.method
Andrew Geissler82c905d2020-04-13 13:39:40 -05001696 done = False
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001697
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001698 if ud.lockfile:
1699 lf = bb.utils.lockfile(ud.lockfile)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001700
1701 try:
1702 self.d.setVar("BB_NO_NETWORK", network)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001703
Andrew Geissler82c905d2020-04-13 13:39:40 -05001704 if m.verify_donestamp(ud, self.d) and not m.need_update(ud, self.d):
1705 done = True
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001706 elif m.try_premirror(ud, self.d):
Andrew Geisslerd1e89492021-02-12 15:35:20 -06001707 logger.debug("Trying PREMIRRORS")
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001708 mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
Andrew Geissler82c905d2020-04-13 13:39:40 -05001709 done = m.try_mirrors(self, ud, self.d, mirrors)
1710 if done:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001711 try:
1712 # early checksum verification so that if the checksum of the premirror
1713 # contents mismatch the fetcher can still try upstream and mirrors
Andrew Geissler82c905d2020-04-13 13:39:40 -05001714 m.update_donestamp(ud, self.d)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001715 except ChecksumError as e:
1716 logger.warning("Checksum failure encountered with premirror download of %s - will attempt other sources." % u)
Andrew Geisslerd1e89492021-02-12 15:35:20 -06001717 logger.debug(str(e))
Andrew Geissler82c905d2020-04-13 13:39:40 -05001718 done = False
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001719
1720 if premirroronly:
1721 self.d.setVar("BB_NO_NETWORK", "1")
1722
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001723 firsterr = None
Andrew Geisslereff27472021-10-29 15:35:00 -05001724 verified_stamp = False
1725 if done:
1726 verified_stamp = m.verify_donestamp(ud, self.d)
Andrew Geissler82c905d2020-04-13 13:39:40 -05001727 if not done and (not verified_stamp or m.need_update(ud, self.d)):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001728 try:
1729 if not trusted_network(self.d, ud.url):
1730 raise UntrustedUrl(ud.url)
Andrew Geisslerd1e89492021-02-12 15:35:20 -06001731 logger.debug("Trying Upstream")
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001732 m.download(ud, self.d)
1733 if hasattr(m, "build_mirror_data"):
1734 m.build_mirror_data(ud, self.d)
Andrew Geissler82c905d2020-04-13 13:39:40 -05001735 done = True
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001736 # early checksum verify, so that if checksum mismatched,
1737 # fetcher still have chance to fetch from mirror
Andrew Geissler82c905d2020-04-13 13:39:40 -05001738 m.update_donestamp(ud, self.d)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001739
1740 except bb.fetch2.NetworkAccess:
1741 raise
1742
1743 except BBFetchException as e:
1744 if isinstance(e, ChecksumError):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001745 logger.warning("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
Andrew Geisslerd1e89492021-02-12 15:35:20 -06001746 logger.debug(str(e))
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001747 if os.path.exists(ud.localpath):
1748 rename_bad_checksum(ud, e.checksum)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001749 elif isinstance(e, NoChecksumError):
1750 raise
1751 else:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001752 logger.warning('Failed to fetch URL %s, attempting MIRRORS if available' % u)
Andrew Geisslerd1e89492021-02-12 15:35:20 -06001753 logger.debug(str(e))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001754 firsterr = e
1755 # Remove any incomplete fetch
1756 if not verified_stamp:
1757 m.clean(ud, self.d)
Andrew Geisslerd1e89492021-02-12 15:35:20 -06001758 logger.debug("Trying MIRRORS")
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001759 mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
Andrew Geissler82c905d2020-04-13 13:39:40 -05001760 done = m.try_mirrors(self, ud, self.d, mirrors)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001761
Andrew Geissler82c905d2020-04-13 13:39:40 -05001762 if not done or not m.done(ud, self.d):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001763 if firsterr:
1764 logger.error(str(firsterr))
1765 raise FetchError("Unable to fetch URL from any source.", u)
1766
Andrew Geissler82c905d2020-04-13 13:39:40 -05001767 m.update_donestamp(ud, self.d)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001768
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001769 except IOError as e:
Brad Bishop19323692019-04-05 15:28:33 -04001770 if e.errno in [errno.ESTALE]:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001771 logger.error("Stale Error Observed %s." % u)
1772 raise ChecksumError("Stale Error Detected")
1773
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001774 except BBFetchException as e:
1775 if isinstance(e, ChecksumError):
1776 logger.error("Checksum failure fetching %s" % u)
1777 raise
1778
1779 finally:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001780 if ud.lockfile:
1781 bb.utils.unlockfile(lf)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001782
1783 def checkstatus(self, urls=None):
1784 """
Andrew Geisslereff27472021-10-29 15:35:00 -05001785 Check all URLs exist upstream.
1786
1787 Returns None if the URLs exist, raises FetchError if the check wasn't
1788 successful but there wasn't an error (such as file not found), and
1789 raises other exceptions in error cases.
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001790 """
1791
1792 if not urls:
1793 urls = self.urls
1794
1795 for u in urls:
1796 ud = self.ud[u]
1797 ud.setup_localpath(self.d)
1798 m = ud.method
Andrew Geisslerd1e89492021-02-12 15:35:20 -06001799 logger.debug("Testing URL %s", u)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001800 # First try checking uri, u, from PREMIRRORS
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001801 mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
Andrew Geissler82c905d2020-04-13 13:39:40 -05001802 ret = m.try_mirrors(self, ud, self.d, mirrors, True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001803 if not ret:
1804 # Next try checking from the original uri, u
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001805 ret = m.checkstatus(self, ud, self.d)
1806 if not ret:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001807 # Finally, try checking uri, u, from MIRRORS
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001808 mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
Andrew Geissler82c905d2020-04-13 13:39:40 -05001809 ret = m.try_mirrors(self, ud, self.d, mirrors, True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001810
1811 if not ret:
1812 raise FetchError("URL %s doesn't work" % u, u)
1813
1814 def unpack(self, root, urls=None):
1815 """
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001816 Unpack urls to root
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001817 """
1818
1819 if not urls:
1820 urls = self.urls
1821
1822 for u in urls:
1823 ud = self.ud[u]
1824 ud.setup_localpath(self.d)
1825
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001826 if ud.lockfile:
1827 lf = bb.utils.lockfile(ud.lockfile)
1828
1829 ud.method.unpack(ud, root, self.d)
1830
1831 if ud.lockfile:
1832 bb.utils.unlockfile(lf)
1833
1834 def clean(self, urls=None):
1835 """
1836 Clean files that the fetcher gets or places
1837 """
1838
1839 if not urls:
1840 urls = self.urls
1841
1842 for url in urls:
1843 if url not in self.ud:
Brad Bishop19323692019-04-05 15:28:33 -04001844 self.ud[url] = FetchData(url, self.d)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001845 ud = self.ud[url]
1846 ud.setup_localpath(self.d)
1847
1848 if not ud.localfile and ud.localpath is None:
1849 continue
1850
1851 if ud.lockfile:
1852 lf = bb.utils.lockfile(ud.lockfile)
1853
1854 ud.method.clean(ud, self.d)
1855 if ud.donestamp:
1856 bb.utils.remove(ud.donestamp)
1857
1858 if ud.lockfile:
1859 bb.utils.unlockfile(lf)
1860
Andrew Geissler4ed12e12020-06-05 18:00:41 -05001861 def expanded_urldata(self, urls=None):
1862 """
1863 Get an expanded list of FetchData objects covering both the given
1864 URLS and any additional implicit URLs that are added automatically by
1865 the appropriate FetchMethod.
1866 """
1867
1868 if not urls:
1869 urls = self.urls
1870
1871 urldata = []
1872 for url in urls:
1873 ud = self.ud[url]
1874 urldata.append(ud)
1875 urldata += ud.method.implicit_urldata(ud, self.d)
1876
1877 return urldata
1878
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001879class FetchConnectionCache(object):
1880 """
1881 A class which represents an container for socket connections.
1882 """
1883 def __init__(self):
1884 self.cache = {}
1885
1886 def get_connection_name(self, host, port):
1887 return host + ':' + str(port)
1888
1889 def add_connection(self, host, port, connection):
1890 cn = self.get_connection_name(host, port)
1891
1892 if cn not in self.cache:
1893 self.cache[cn] = connection
1894
1895 def get_connection(self, host, port):
1896 connection = None
1897
1898 cn = self.get_connection_name(host, port)
1899 if cn in self.cache:
1900 connection = self.cache[cn]
1901
1902 return connection
1903
1904 def remove_connection(self, host, port):
1905 cn = self.get_connection_name(host, port)
1906 if cn in self.cache:
1907 self.cache[cn].close()
1908 del self.cache[cn]
1909
1910 def close_connections(self):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001911 for cn in list(self.cache.keys()):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001912 self.cache[cn].close()
1913 del self.cache[cn]
1914
1915from . import cvs
1916from . import git
1917from . import gitsm
1918from . import gitannex
1919from . import local
1920from . import svn
1921from . import wget
1922from . import ssh
1923from . import sftp
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001924from . import s3
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001925from . import perforce
1926from . import bzr
1927from . import hg
1928from . import osc
1929from . import repo
1930from . import clearcase
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001931from . import npm
Andrew Geissler82c905d2020-04-13 13:39:40 -05001932from . import npmsw
Andrew Geissler95ac1b82021-03-31 14:34:31 -05001933from . import az
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001934
# Register one instance of each built-in fetch method implementation.
methods.append(local.Local())
methods.append(wget.Wget())
methods.append(svn.Svn())
methods.append(git.Git())
methods.append(gitsm.GitSM())
methods.append(gitannex.GitANNEX())
methods.append(cvs.Cvs())
methods.append(ssh.SSH())
methods.append(sftp.SFTP())
methods.append(s3.S3())
methods.append(perforce.Perforce())
methods.append(bzr.Bzr())
methods.append(hg.Hg())
methods.append(osc.Osc())
methods.append(repo.Repo())
methods.append(clearcase.ClearCase())
methods.append(npm.Npm())
methods.append(npmsw.NpmShrinkWrap())
methods.append(az.Az())