blob: b853da30bdbf84eeeb9b2f039bf51e5e4ccd6a3a [file] [log] [blame]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8"""
9
10# Copyright (C) 2003, 2004 Chris Larson
11# Copyright (C) 2012 Intel Corporation
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
Patrick Williamsc124f4f2015-09-15 14:41:29 -050028import os, re
29import signal
Patrick Williamsc124f4f2015-09-15 14:41:29 -050030import logging
Patrick Williamsc0f7c042017-02-23 20:41:17 -060031import urllib.request, urllib.parse, urllib.error
32if 'git' not in urllib.parse.uses_netloc:
33 urllib.parse.uses_netloc.append('git')
34import operator
35import collections
36import subprocess
37import pickle
Brad Bishop6e60e8b2018-02-01 10:27:11 -050038import errno
Patrick Williamsc124f4f2015-09-15 14:41:29 -050039import bb.persist_data, bb.utils
40import bb.checksum
Patrick Williamsc124f4f2015-09-15 14:41:29 -050041import bb.process
Patrick Williamsc124f4f2015-09-15 14:41:29 -050042
# Fetcher code version -- presumably consumed by cache/stamp invalidation
# logic elsewhere; confirm at callers before changing.
__version__ = "2"
# Process-wide cache of local file checksums; initialised in fetcher_init()
# and flushed via fetcher_parse_save()/fetcher_parse_done().
_checksum_cache = bb.checksum.FileChecksumCache()

logger = logging.getLogger("BitBake.Fetcher")
47
class BBFetchException(Exception):
    """Base class that all BitBake fetcher exceptions inherit from."""

    def __init__(self, message):
        # Keep the message on the instance so subclasses can reuse it.
        self.msg = message
        super().__init__(message)

    def __str__(self):
        # Render just the message text, without class-name noise.
        return self.msg
56
class UntrustedUrl(BBFetchException):
    """Exception raised when encountering a host not listed in BB_ALLOWED_NETWORKS"""
    def __init__(self, url, message=''):
        # Prefer a caller-supplied message; fall back to a standard one.
        msg = message or "The URL: '%s' is not trusted and cannot be used" % url
        self.url = url
        BBFetchException.__init__(self, msg)
        # Record the offending URL in args.
        self.args = (url,)
67
class MalformedUrl(BBFetchException):
    """Exception raised when encountering an invalid url"""
    def __init__(self, url, message=''):
        # Prefer a caller-supplied message; fall back to a standard one.
        msg = message or "The URL: '%s' is invalid and cannot be interpreted" % url
        self.url = url
        BBFetchException.__init__(self, msg)
        # Record the offending URL in args.
        self.args = (url,)
78
class FetchError(BBFetchException):
    """General fetcher exception when something happens incorrectly"""
    def __init__(self, message, url = None):
        # Only mention the URL in the message when one was supplied.
        if url:
            full = "Fetcher failure for URL: '%s'. %s" % (url, message)
        else:
            full = "Fetcher failure: %s" % message
        self.url = url
        BBFetchException.__init__(self, full)
        self.args = (message, url)
89
class ChecksumError(FetchError):
    """Exception when mismatched checksum encountered"""
    def __init__(self, message, url = None, checksum = None):
        # Keep the mismatching checksum so handlers can inspect it.
        self.checksum = checksum
        super().__init__(message, url)
95
class NoChecksumError(FetchError):
    """Raised when no checksum is specified for a download but strict
    checking (BB_STRICT_CHECKSUM) is enabled -- see verify_checksum()."""
98
class UnpackError(BBFetchException):
    """General fetcher exception when something happens incorrectly when unpacking"""
    def __init__(self, message, url):
        self.url = url
        BBFetchException.__init__(self, "Unpack failure for URL: '%s'. %s" % (url, message))
        self.args = (message, url)
106
class NoMethodError(BBFetchException):
    """Exception raised when there is no method to obtain a supplied url or set of urls"""
    def __init__(self, url):
        self.url = url
        BBFetchException.__init__(self, "Could not find a fetcher which supports the URL: '%s'" % url)
        self.args = (url,)
114
class MissingParameterError(BBFetchException):
    """Exception raised when a fetch method is missing a critical parameter in the url"""
    def __init__(self, missing, url):
        self.url = url
        # Remember which parameter was absent for programmatic handling.
        self.missing = missing
        BBFetchException.__init__(self, "URL: '%s' is missing the required parameter '%s'" % (url, missing))
        self.args = (missing, url)
123
class ParameterError(BBFetchException):
    """Exception raised when a url cannot be processed due to invalid parameters."""
    def __init__(self, message, url):
        self.url = url
        BBFetchException.__init__(self, "URL: '%s' has invalid parameters. %s" % (url, message))
        self.args = (message, url)
131
class NetworkAccess(BBFetchException):
    """Exception raised when network access is disabled but it is required."""
    def __init__(self, url, cmd):
        self.url = url
        # Keep the command so callers can report what was attempted.
        self.cmd = cmd
        BBFetchException.__init__(
            self,
            "Network access disabled through BB_NO_NETWORK (or set indirectly due to use of BB_FETCH_PREMIRRORONLY) but access requested with command %s (for url %s)" % (cmd, url))
        self.args = (url, cmd)
140
class NonLocalMethod(Exception):
    """Signalling exception that carries no message payload."""
    def __init__(self):
        super().__init__()
144
145
class URI(object):
    """
    A class representing a generic URI, with methods for
    accessing the URI components, and stringifies to the
    URI.

    It is constructed by calling it with a URI, or setting
    the attributes manually:

     uri = URI("http://example.com/")

     uri = URI()
     uri.scheme = 'http'
     uri.hostname = 'example.com'
     uri.path = '/'

    It has the following attributes:

      * scheme (read/write)
      * userinfo (authentication information) (read/write)
        * username (read/write)
        * password (read/write)

        Note, password is deprecated as of RFC 3986.

      * hostname (read/write)
      * port (read/write)
      * hostport (read only)
        "hostname:port", if both are set, otherwise just "hostname"
      * path (read/write)
      * path_quoted (read/write)
        A URI quoted version of path
      * params (dict) (read/write)
      * query (dict) (read/write)
      * relative (bool) (read only)
        True if this is a "relative URI", (e.g. file:foo.diff)

    It stringifies to the URI itself.

    Some notes about relative URIs: while it's specified that
    a URI beginning with <scheme>:// should either be directly
    followed by a hostname or a /, the old URI handling of the
    fetch2 library did not conform to this. Therefore, this URI
    class has some kludges to make sure that URIs are parsed in
    a way conforming to bitbake's current usage. This URI class
    supports the following:

     file:relative/path.diff (IETF compliant)
     git:relative/path.git (IETF compliant)
     git:///absolute/path.git (IETF compliant)
     file:///absolute/path.diff (IETF compliant)

     file://relative/path.diff (not IETF compliant)

    But it does not support the following:

     file://hostname/absolute/path.diff (would be IETF compliant)

    Note that the last case only applies to a list of
    "whitelisted" schemes (currently only file://), that requires
    its URIs to not have a network location.
    """

    # Schemes that may legitimately appear in scheme:path (no netloc) form.
    _relative_schemes = ['file', 'git']
    # Schemes that must never carry a network location.
    _netloc_forbidden = ['file']

    def __init__(self, uri=None):
        self.scheme = ''
        self.userinfo = ''
        self.hostname = ''
        self.port = None
        self._path = ''
        self.params = {}
        self.query = {}
        self.relative = False

        if not uri:
            return

        # We hijack the URL parameters, since the way bitbake uses
        # them are not quite RFC compliant.
        uri, param_str = (uri.split(";", 1) + [None])[:2]

        urlp = urllib.parse.urlparse(uri)
        self.scheme = urlp.scheme

        reparse = 0

        # Coerce urlparse to make URI scheme use netloc.
        # Bug fix: this previously appended the scheme to
        # urllib.parse.uses_params, which has no effect on netloc handling;
        # register it in uses_netloc as the guard above intends.
        if self.scheme not in urllib.parse.uses_netloc:
            urllib.parse.uses_netloc.append(self.scheme)
            reparse = 1

        # Make urlparse happy(/ier) by converting local resources
        # to RFC compliant URL format. E.g.:
        #   file://foo.diff -> file:foo.diff
        if urlp.scheme in self._netloc_forbidden:
            uri = re.sub(r"(?<=:)//(?!/)", "", uri, 1)
            reparse = 1

        if reparse:
            urlp = urllib.parse.urlparse(uri)

        # Identify if the URI is relative or not
        if urlp.scheme in self._relative_schemes and \
           re.compile(r"^\w+:(?!//)").match(uri):
            self.relative = True

        if not self.relative:
            self.hostname = urlp.hostname or ''
            self.port = urlp.port

            self.userinfo += urlp.username or ''

            if urlp.password:
                self.userinfo += ':%s' % urlp.password

        self.path = urllib.parse.unquote(urlp.path)

        if param_str:
            self.params = self._param_str_split(param_str, ";")
        if urlp.query:
            self.query = self._param_str_split(urlp.query, "&")

    def __str__(self):
        userinfo = self.userinfo
        if userinfo:
            userinfo += '@'

        return "%s:%s%s%s%s%s%s" % (
            self.scheme,
            '' if self.relative else '//',
            userinfo,
            self.hostport,
            self.path_quoted,
            self._query_str(),
            self._param_str())

    def _param_str(self):
        return (
            ''.join([';', self._param_str_join(self.params, ";")])
            if self.params else '')

    def _query_str(self):
        return (
            ''.join(['?', self._param_str_join(self.query, "&")])
            if self.query else '')

    def _param_str_split(self, string, elmdelim, kvdelim="="):
        # Preserve parameter order so stringification round-trips.
        ret = collections.OrderedDict()
        for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim)]:
            ret[k] = v
        return ret

    def _param_str_join(self, dict_, elmdelim, kvdelim="="):
        return elmdelim.join([kvdelim.join([k, v]) for k, v in dict_.items()])

    @property
    def hostport(self):
        if not self.port:
            return self.hostname
        return "%s:%d" % (self.hostname, self.port)

    @property
    def path_quoted(self):
        return urllib.parse.quote(self.path)

    @path_quoted.setter
    def path_quoted(self, path):
        self.path = urllib.parse.unquote(path)

    @property
    def path(self):
        return self._path

    @path.setter
    def path(self, path):
        self._path = path

        # An absolute (or empty) path makes the URI non-relative.
        if not path or re.compile(r"^/").match(path):
            self.relative = False
        else:
            self.relative = True

    @property
    def username(self):
        if self.userinfo:
            return (self.userinfo.split(":", 1))[0]
        return ''

    @username.setter
    def username(self, username):
        password = self.password
        self.userinfo = username
        if password:
            self.userinfo += ":%s" % password

    @property
    def password(self):
        if self.userinfo and ":" in self.userinfo:
            return (self.userinfo.split(":", 1))[1]
        return ''

    @password.setter
    def password(self, password):
        self.userinfo = "%s:%s" % (self.username, password)
352
def decodeurl(url):
    """Decodes an URL into the tokens (scheme, network location, path,
    user, password, parameters).
    """
    match = re.match('(?P<type>[^:]*)://((?P<user>[^/;]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?', url)
    if not match:
        raise MalformedUrl(url)

    type = match.group('type')
    location = match.group('location')
    if not location:
        raise MalformedUrl(url)
    user = match.group('user')
    parm = match.group('parm')

    # file:// URLs keep the whole location as the path; otherwise split the
    # location into host and path at the first '/'.
    locidx = location.find('/')
    if type.lower() == 'file':
        host = ""
        path = location
    elif locidx != -1:
        host = location[:locidx]
        path = location[locidx:]
    else:
        host = location
        path = ""

    # Split an embedded "user:password" pair, when present.
    if user:
        usermatch = re.match('(?P<user>[^:]+)(:?(?P<pswd>.*))', user)
        if usermatch:
            user = usermatch.group('user')
            pswd = usermatch.group('pswd')
    else:
        user = ''
        pswd = ''

    # Parameters are ';'-separated key=value pairs after the location.
    p = collections.OrderedDict()
    if parm:
        for s in parm.split(';'):
            if s:
                if '=' not in s:
                    raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s))
                s1, s2 = s.split('=')
                p[s1] = s2

    return type, host, urllib.parse.unquote(path), user, pswd, p
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500398
def encodeurl(decoded):
    """Encodes a URL from tokens (scheme, network location, path,
    user, password, parameters).
    """
    type, host, path, user, pswd, p = decoded

    if not path:
        raise MissingParameterError('path', "encoded from the data %s" % str(decoded))
    if not type:
        raise MissingParameterError('type', "encoded from the data %s" % str(decoded))

    # Assemble the URL from parts; user/host are dropped for file:// URLs.
    parts = ['%s://' % type]
    if user and type != "file":
        parts.append("%s" % user)
        if pswd:
            parts.append(":%s" % pswd)
        parts.append("@")
    if host and type != "file":
        parts.append("%s" % host)
    # Standardise path to ensure comparisons work
    while '//' in path:
        path = path.replace("//", "/")
    parts.append("%s" % urllib.parse.quote(path))
    if p:
        for parm in p:
            parts.append(";%s=%s" % (parm, p[parm]))

    return "".join(parts)
427
def uri_replace(ud, uri_find, uri_replace, replacements, d):
    """Rewrite ud.url according to a (uri_find, uri_replace) mirror rule.

    Each decoded component of ud.url is regexp-matched against the
    corresponding component of uri_find; on a full match, components are
    substituted from uri_replace, after applying the 'replacements' string
    substitutions.  Returns the rewritten URL, or None when the rule does
    not match or the result is unchanged.
    """
    if not ud.url or not uri_find or not uri_replace:
        logger.error("uri_replace: passed an undefined value, not replacing")
        return None
    uri_decoded = list(decodeurl(ud.url))
    uri_find_decoded = list(decodeurl(uri_find))
    uri_replace_decoded = list(decodeurl(uri_replace))
    logger.debug(2, "For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
    result_decoded = ['', '', '', '', '', {}]
    # Component indexes (decodeurl order): 0=scheme, 1=host, 2=path,
    # 3=user, 4=password, 5=params dict.
    for loc, i in enumerate(uri_find_decoded):
        result_decoded[loc] = uri_decoded[loc]
        regexp = i
        if loc == 0 and regexp and not regexp.endswith("$"):
            # Leaving the type unanchored can mean "https" matching "file" can become "files"
            # which is clearly undesirable.
            regexp += "$"
        if loc == 5:
            # Handle URL parameters
            if i:
                # Any specified URL parameters must match
                for k in uri_replace_decoded[loc]:
                    if uri_decoded[loc][k] != uri_replace_decoded[loc][k]:
                        return None
            # Overwrite any specified replacement parameters
            for k in uri_replace_decoded[loc]:
                for l in replacements:
                    uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l])
                result_decoded[loc][k] = uri_replace_decoded[loc][k]
        elif (re.match(regexp, uri_decoded[loc])):
            if not uri_replace_decoded[loc]:
                result_decoded[loc] = ""
            else:
                for k in replacements:
                    uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k])
                #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc]))
                # Only the first occurrence is substituted (count=1).
                result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc], 1)
            if loc == 2:
                # Handle path manipulations
                basename = None
                if uri_decoded[0] != uri_replace_decoded[0] and ud.mirrortarball:
                    # If the source and destination url types differ, must be a mirrortarball mapping
                    basename = os.path.basename(ud.mirrortarball)
                    # Kill parameters, they make no sense for mirror tarballs
                    uri_decoded[5] = {}
                elif ud.localpath and ud.method.supports_checksum(ud):
                    basename = os.path.basename(ud.localpath)
                # Ensure the rewritten path still ends in the expected file name.
                if basename and not result_decoded[loc].endswith(basename):
                    result_decoded[loc] = os.path.join(result_decoded[loc], basename)
        else:
            return None
    result = encodeurl(result_decoded)
    if result == ud.url:
        return None
    logger.debug(2, "For url %s returning %s" % (ud.url, result))
    return result
483
# Registry of available fetch method handlers (populated outside this chunk).
methods = []
# Cache of per-URL fetch data -- presumably keyed by URL; confirm at callers.
urldata_cache = {}
# SCM head revisions snapshotted from the persistent cache in fetcher_init().
saved_headrevs = {}
487
def fetcher_init(d):
    """
    Called to initialize the fetchers once the configuration data is known.
    Calls before this must not hit the cache.
    """
    # When to drop SCM head revisions controlled by user policy
    srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear"
    if srcrev_policy == "cache":
        logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
    elif srcrev_policy == "clear":
        logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
        try:
            # Snapshot the old head revisions so fetcher_compare_revisions()
            # can diff against them after the cache is cleared.
            bb.fetch2.saved_headrevs = revs.items()
        except:
            # NOTE(review): bare except makes the snapshot best-effort;
            # consider narrowing to Exception.
            pass
        revs.clear()
    else:
        raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)

    # (Re)initialise the shared file checksum cache for this datastore.
    _checksum_cache.init_cache(d)

    # Give each registered fetch method a chance to initialise itself.
    for m in methods:
        if hasattr(m, "init"):
            m.init(d)
513
def fetcher_parse_save():
    """Parse-time hook: flush the checksum cache's incremental data to disk."""
    _checksum_cache.save_extras()
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500516
def fetcher_parse_done():
    """End-of-parse hook: merge and save the checksum cache."""
    _checksum_cache.save_merge()
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500519
def fetcher_compare_revisions():
    """
    Compare the revisions in the persistent cache with current values and
    return true/false on whether they've changed.
    """
    # NOTE(review): 'd' is not defined in this scope (the function takes no
    # parameters), so calling this raises NameError -- it likely needs a
    # datastore argument. Confirm against callers before relying on it.
    data = bb.persist_data.persist('BB_URI_HEADREVS', d).items()
    data2 = bb.fetch2.saved_headrevs

    # NOTE(review): 'changed' is dead -- the function returns on the first
    # difference found.
    changed = False
    for key in data:
        if key not in data2 or data2[key] != data[key]:
            logger.debug(1, "%s changed", key)
            changed = True
            return True
        else:
            logger.debug(2, "%s did not change", key)
    return False
538
def mirror_from_string(data):
    """Parse a MIRRORS-style string into a list of (find, replace) tuples."""
    # Escaped newlines act as whitespace separators between entries.
    words = (data or "").replace('\\n', ' ').split()
    # Entries must come in find/replace pairs
    if len(words) % 2 != 0:
        bb.warn('Invalid mirror data %s, should have paired members.' % data)
    pairer = iter(words)
    return list(zip(pairer, pairer))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500545
def verify_checksum(ud, d, precomputed=None):
    """
    verify the MD5 and SHA256 checksum for downloaded src

    Raises a FetchError if one or both of the SRC_URI checksums do not match
    the downloaded file, or if BB_STRICT_CHECKSUM is set and there are no
    checksums specified.

    Returns a dict of checksums that can be stored in a done stamp file and
    passed in as precomputed parameter in a later call to avoid re-computing
    the checksums from the file. This allows verifying the checksums of the
    file against those in the recipe each time, rather than only after
    downloading. See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571.
    """

    _MD5_KEY = "md5"
    _SHA256_KEY = "sha256"

    # Bug fix: avoid the shared-mutable-default-argument pitfall; an absent
    # argument behaves exactly like the previous empty-dict default.
    if precomputed is None:
        precomputed = {}

    if ud.ignore_checksums or not ud.method.supports_checksum(ud):
        return {}

    # Prefer cached values; only hash the file when no value was supplied.
    if _MD5_KEY in precomputed:
        md5data = precomputed[_MD5_KEY]
    else:
        md5data = bb.utils.md5_file(ud.localpath)

    if _SHA256_KEY in precomputed:
        sha256data = precomputed[_SHA256_KEY]
    else:
        sha256data = bb.utils.sha256_file(ud.localpath)

    if ud.method.recommends_checksum(ud) and not ud.md5_expected and not ud.sha256_expected:
        # If strict checking enabled and neither sum defined, raise error
        strict = d.getVar("BB_STRICT_CHECKSUM") or "0"
        if strict == "1":
            logger.error('No checksum specified for %s, please add at least one to the recipe:\n'
                         'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
                         (ud.localpath, ud.md5_name, md5data,
                          ud.sha256_name, sha256data))
            raise NoChecksumError('Missing SRC_URI checksum', ud.url)

        # Log missing sums so user can more easily add them
        logger.warning('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n'
                       'SRC_URI[%s] = "%s"',
                       ud.localpath, ud.md5_name, md5data)
        logger.warning('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n'
                       'SRC_URI[%s] = "%s"',
                       ud.localpath, ud.sha256_name, sha256data)

    # We want to alert the user if a checksum is defined in the recipe but
    # it does not match.
    msg = ""
    mismatch = False
    if ud.md5_expected and ud.md5_expected != md5data:
        msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'md5', md5data, ud.md5_expected)
        mismatch = True

    if ud.sha256_expected and ud.sha256_expected != sha256data:
        msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'sha256', sha256data, ud.sha256_expected)
        mismatch = True

    if mismatch:
        msg = msg + '\nIf this change is expected (e.g. you have upgraded to a new version without updating the checksums) then you can use these lines within the recipe:\nSRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"\nOtherwise you should retry the download and/or check with upstream to determine if the file has become corrupted or otherwise unexpectedly modified.\n' % (ud.md5_name, md5data, ud.sha256_name, sha256data)

    if len(msg):
        raise ChecksumError('Checksum mismatch!%s' % msg, ud.url, md5data)

    return {
        _MD5_KEY: md5data,
        _SHA256_KEY: sha256data
    }
617
618
def verify_donestamp(ud, d, origud=None):
    """
    Check whether the done stamp file has the right checksums (if the fetch
    method supports them). If it doesn't, delete the done stamp and force
    a re-download.

    Returns True, if the donestamp exists and is valid, False otherwise. When
    returning False, any existing done stamps are removed.
    """
    # Stamps disabled for this (or the original mirror) URL: nothing to check.
    if not ud.needdonestamp or (origud and not origud.needdonestamp):
        return True

    if not os.path.exists(ud.donestamp):
        return False

    if (not ud.method.supports_checksum(ud) or
        (origud and not origud.method.supports_checksum(origud))):
        # done stamp exists, checksums not supported; assume the local file is
        # current
        return True

    if not os.path.exists(ud.localpath):
        # done stamp exists, but the downloaded file does not; the done stamp
        # must be incorrect, re-trigger the download
        bb.utils.remove(ud.donestamp)
        return False

    precomputed_checksums = {}
    # Only re-use the precomputed checksums if the donestamp is newer than the
    # file. Do not rely on the mtime of directories, though. If ud.localpath is
    # a directory, there will probably not be any checksums anyway.
    if (os.path.isdir(ud.localpath) or
            os.path.getmtime(ud.localpath) < os.path.getmtime(ud.donestamp)):
        try:
            with open(ud.donestamp, "rb") as cachefile:
                pickled = pickle.Unpickler(cachefile)
                precomputed_checksums.update(pickled.load())
        except Exception as e:
            # Avoid the warnings on the upgrade path from empty done stamp
            # files to those containing the checksums.
            if not isinstance(e, EOFError):
                # Ignore errors, they aren't fatal
                logger.warning("Couldn't load checksums from donestamp %s: %s "
                               "(msg: %s)" % (ud.donestamp, type(e).__name__,
                                              str(e)))

    try:
        checksums = verify_checksum(ud, d, precomputed_checksums)
        # If the cache file did not have the checksums, compute and store them
        # as an upgrade path from the previous done stamp file format.
        if checksums != precomputed_checksums:
            with open(ud.donestamp, "wb") as cachefile:
                p = pickle.Pickler(cachefile, 2)
                p.dump(checksums)
        return True
    except ChecksumError as e:
        # Checksums failed to verify, trigger re-download and remove the
        # incorrect stamp file.
        logger.warning("Checksum mismatch for local file %s\n"
                       "Cleaning and trying again." % ud.localpath)
        if os.path.exists(ud.localpath):
            rename_bad_checksum(ud, e.checksum)
        bb.utils.remove(ud.donestamp)
        return False
683
684
def update_stamp(ud, d):
    """
    donestamp is a file stamp indicating the whole fetch is done;
    this function updates the stamp after verifying the checksum.
    Re-raises ChecksumError (after cleaning up) if verification fails.
    """
    if not ud.needdonestamp:
        return

    if os.path.exists(ud.donestamp):
        # Touch the done stamp file to show active use of the download
        try:
            os.utime(ud.donestamp, None)
        except:
            # Errors aren't fatal here
            pass
    else:
        try:
            checksums = verify_checksum(ud, d)
            # Store the checksums for later re-verification against the recipe
            with open(ud.donestamp, "wb") as cachefile:
                p = pickle.Pickler(cachefile, 2)
                p.dump(checksums)
        except ChecksumError as e:
            # Checksums failed to verify, trigger re-download and remove the
            # incorrect stamp file.
            logger.warning("Checksum mismatch for local file %s\n"
                           "Cleaning and trying again." % ud.localpath)
            if os.path.exists(ud.localpath):
                rename_bad_checksum(ud, e.checksum)
            bb.utils.remove(ud.donestamp)
            raise
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500716
def subprocess_setup():
    """preexec_fn for child processes: restore default SIGPIPE handling.

    Python installs a SIGPIPE handler by default, which is usually not what
    non-Python subprocesses expect; SIGPIPE errors are known issues with
    gzip/bash.
    """
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
722
def get_autorev(d):
    """Return the AUTOINC placeholder revision, disabling SRCREV caching
    unless the cache policy explicitly requests it."""
    policy = d.getVar('BB_SRCREV_POLICY')
    # only not cache src rev in autorev case
    if policy != "cache":
        d.setVar('BB_DONT_CACHE', '1')
    return "AUTOINC"
728
def get_srcrev(d, method_name='sortable_revision'):
    """
    Return the revision string, usually for use in the version string (PV) of the current package
    Most packages usually only have one SCM so we just pass on the call.
    In the multi SCM case, we build a value based on SRCREV_FORMAT which must
    have been set.

    The idea here is that we put the string "AUTOINC+" into return value if the revisions are not
    incremental, other code is then responsible for turning that into an increasing value (if needed)

    A method_name can be supplied to retrieve an alternatively formatted revision from a fetcher, if
    that fetcher provides a method with the given name and the same signature as sortable_revision.
    """

    # Collect the SRC_URI entries whose fetch method tracks SCM revisions.
    scms = []
    fetcher = Fetch(d.getVar('SRC_URI').split(), d)
    urldata = fetcher.ud
    for u in urldata:
        if urldata[u].method.supports_srcrev():
            scms.append(u)

    if len(scms) == 0:
        raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")

    # Single SCM, single name: return its (possibly truncated) revision directly.
    if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
        autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].names[0])
        # Revisions are truncated to 10 characters.
        if len(rev) > 10:
            rev = rev[:10]
        if autoinc:
            return "AUTOINC+" + rev
        return rev

    #
    # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
    #
    format = d.getVar('SRCREV_FORMAT')
    if not format:
        raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")

    name_to_rev = {}
    seenautoinc = False
    for scm in scms:
        ud = urldata[scm]
        for name in ud.names:
            autoinc, rev = getattr(ud.method, method_name)(ud, d, name)
            seenautoinc = seenautoinc or autoinc
            if len(rev) > 10:
                rev = rev[:10]
            name_to_rev[name] = rev
    # Replace names by revisions in the SRCREV_FORMAT string. The approach used
    # here can handle names being prefixes of other names and names appearing
    # as substrings in revisions (in which case the name should not be
    # expanded). The '|' regular expression operator tries matches from left to
    # right, so we need to sort the names with the longest ones first.
    names_descending_len = sorted(name_to_rev, key=len, reverse=True)
    name_to_rev_re = "|".join(re.escape(name) for name in names_descending_len)
    format = re.sub(name_to_rev_re, lambda match: name_to_rev[match.group(0)], format)

    if seenautoinc:
        format = "AUTOINC+" + format

    return format
791
def localpath(url, d):
    """Convenience wrapper: return the local download path for a single URL."""
    fetcher = bb.fetch2.Fetch([url], d)
    return fetcher.localpath(url)
795
def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
    """
    Run cmd returning the command output
    Raise an error if interrupted or cmd fails
    Optionally echo command output to stdout
    Optionally remove the files/directories listed in cleanup upon failure

    cmd      shell command line to run (executed with shell=True)
    d        data store used to look up exported environment variables
    quiet    accepted for API compatibility; not referenced in this body
    cleanup  iterable of paths removed (recursively) if the command fails
    log      file-like object passed through to bb.process.run for logging
    workdir  directory to run the command in (cwd for the subprocess)

    Returns the command's stdout. Raises FetchError on any failure.
    """

    # Need to export PATH as binary could be in metadata paths
    # rather than host provided
    # Also include some other variables.
    # FIXME: Should really include all export variables?
    exportvars = ['HOME', 'PATH',
                  'HTTP_PROXY', 'http_proxy',
                  'HTTPS_PROXY', 'https_proxy',
                  'FTP_PROXY', 'ftp_proxy',
                  'FTPS_PROXY', 'ftps_proxy',
                  'NO_PROXY', 'no_proxy',
                  'ALL_PROXY', 'all_proxy',
                  'GIT_PROXY_COMMAND',
                  'GIT_SSL_CAINFO',
                  'GIT_SMART_HTTP',
                  'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
                  'SOCKS5_USER', 'SOCKS5_PASSWD',
                  'DBUS_SESSION_BUS_ADDRESS',
                  'P4CONFIG']

    if not cleanup:
        cleanup = []

    # If PATH contains WORKDIR which contains PV which contains SRCPV we
    # can end up in circular recursion here so give the option of breaking it
    # in a data store copy.
    try:
        d.getVar("PV")
    except bb.data_smart.ExpansionError:
        d = bb.data.createCopy(d)
        d.setVar("PV", "fetcheravoidrecurse")

    # Fall back to the original (pre-bitbake) environment for any variable
    # not set in the datastore, so e.g. the user's proxy settings still apply.
    origenv = d.getVar("BB_ORIGENV", False)
    for var in exportvars:
        val = d.getVar(var) or (origenv and origenv.getVar(var))
        if val:
            # Prepend an 'export VAR="val";' prefix to the shell command line.
            cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)

    logger.debug(1, "Running %s", cmd)

    success = False
    error_message = ""

    try:
        (output, errors) = bb.process.run(cmd, log=log, shell=True, stderr=subprocess.PIPE, cwd=workdir)
        success = True
    except bb.process.NotFoundError as e:
        error_message = "Fetch command %s" % (e.command)
    except bb.process.ExecutionError as e:
        # Collect whatever the failing command produced for the error report.
        if e.stdout:
            output = "output:\n%s\n%s" % (e.stdout, e.stderr)
        elif e.stderr:
            output = "output:\n%s" % e.stderr
        else:
            output = "no output"
        error_message = "Fetch command %s failed with exit code %s, %s" % (e.command, e.exitcode, output)
    except bb.process.CmdError as e:
        error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg)
    if not success:
        # Remove any partial results so a retry starts from a clean state.
        for f in cleanup:
            try:
                bb.utils.remove(f, True)
            except OSError:
                pass

        raise FetchError(error_message)

    return output
871
def check_network_access(d, info, url):
    """
    log remote network access, and error if BB_NO_NETWORK is set or the given
    URI is untrusted
    """
    # Guard clauses: refuse outright when networking is disabled, then when
    # the host is outside the allowed-networks list; otherwise just log it.
    if d.getVar("BB_NO_NETWORK") == "1":
        raise NetworkAccess(url, info)
    if not trusted_network(d, url):
        raise UntrustedUrl(url, info)
    logger.debug(1, "Fetcher accessed the network with the command %s" % info)
883
def build_mirroruris(origud, mirrors, ld):
    """
    Expand the (find, replace) mirror entries against origud's URL,
    recursively (mirrors of mirrors), returning a parallel pair of lists:
    the candidate mirror URIs and their FetchData objects.
    """
    uris = []
    uds = []

    # Substitution values available to uri_replace() patterns.
    replacements = {}
    replacements["TYPE"] = origud.type
    replacements["HOST"] = origud.host
    replacements["PATH"] = origud.path
    replacements["BASENAME"] = origud.path.split("/")[-1]
    replacements["MIRRORNAME"] = origud.host.replace(':','.') + origud.path.replace('/', '.').replace('*', '.')

    def adduri(ud, uris, uds, mirrors):
        # Try every mirror line against the given url, appending matches.
        for line in mirrors:
            try:
                (find, replace) = line
            except ValueError:
                # Malformed mirror entry (not a 2-tuple); skip it.
                continue
            newuri = uri_replace(ud, find, replace, replacements, ld)
            # Skip non-matches, duplicates, and the original URL itself.
            if not newuri or newuri in uris or newuri == origud.url:
                continue

            if not trusted_network(ld, newuri):
                logger.debug(1, "Mirror %s not in the list of trusted networks, skipping" % (newuri))
                continue

            # Create a local copy of the mirrors minus the current line
            # this will prevent us from recursively processing the same line
            # as well as indirect recursion A -> B -> C -> A
            localmirrors = list(mirrors)
            localmirrors.remove(line)

            try:
                newud = FetchData(newuri, ld)
                newud.setup_localpath(ld)
            except bb.fetch2.BBFetchException as e:
                logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
                logger.debug(1, str(e))
                try:
                    # setup_localpath of file:// urls may fail, we should still see
                    # if mirrors of the url exist
                    adduri(newud, uris, uds, localmirrors)
                except UnboundLocalError:
                    # NOTE(review): deliberately swallowed - 'newud' may be
                    # unbound if FetchData() itself raised above.
                    pass
                continue
            uris.append(newuri)
            uds.append(newud)

            # Recurse: a mirror URL may itself have mirrors.
            adduri(newud, uris, uds, localmirrors)

    adduri(origud, uris, uds, mirrors)

    return uris, uds
936
def rename_bad_checksum(ud, suffix):
    """
    Move the downloaded file aside, tagging the new name with *suffix*
    (the bad checksum), so a retry will not reuse the corrupt download.
    """
    if ud.localpath is None:
        return

    renamed = "%s_bad-checksum_%s" % (ud.localpath, suffix)
    bb.warn("Renaming %s to %s" % (ud.localpath, renamed))
    bb.utils.movefile(ud.localpath, renamed)
948
949
def try_mirror_url(fetch, origud, ud, ld, check = False):
    """
    Try fetching (or, with check=True, probing) a single mirror url.

    Returns None or a value (a localpath / checkstatus result) when done;
    returns False to mean "try another url".
    """
    # Take the mirror's own lock unless it shares the original's lockfile.
    if ud.lockfile and ud.lockfile != origud.lockfile:
        lf = bb.utils.lockfile(ud.lockfile)

    try:
        if check:
            found = ud.method.checkstatus(fetch, ud, ld)
            if found:
                return found
            return False

        if not verify_donestamp(ud, ld, origud) or ud.method.need_update(ud, ld):
            ud.method.download(ud, ld)
            if hasattr(ud.method,"build_mirror_data"):
                ud.method.build_mirror_data(ud, ld)

        if not ud.localpath or not os.path.exists(ud.localpath):
            return False

        if ud.localpath == origud.localpath:
            return ud.localpath

        # We may be obtaining a mirror tarball which needs further processing by the real fetcher
        # If that tarball is a local file:// we need to provide a symlink to it
        dldir = ld.getVar("DL_DIR")
        if origud.mirrortarball and os.path.basename(ud.localpath) == os.path.basename(origud.mirrortarball) \
                and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
            # Create donestamp in old format to avoid triggering a re-download
            if ud.donestamp:
                bb.utils.mkdirhier(os.path.dirname(ud.donestamp))
                open(ud.donestamp, 'w').close()
            dest = os.path.join(dldir, os.path.basename(ud.localpath))
            if not os.path.exists(dest):
                # In case this is executing without any file locks held (as is
                # the case for file:// URLs), two tasks may end up here at the
                # same time, in which case we do not want the second task to
                # fail when the link has already been created by the first task.
                try:
                    os.symlink(ud.localpath, dest)
                except FileExistsError:
                    pass
            if not verify_donestamp(origud, ld) or origud.method.need_update(origud, ld):
                origud.method.download(origud, ld)
                if hasattr(origud.method,"build_mirror_data"):
                    origud.method.build_mirror_data(origud, ld)
            return origud.localpath
        # Otherwise the result is a local file:// and we symlink to it
        if not os.path.exists(origud.localpath):
            if os.path.islink(origud.localpath):
                # Broken symbolic link
                os.unlink(origud.localpath)

            # As per above, in case two tasks end up here simultaneously.
            try:
                os.symlink(ud.localpath, origud.localpath)
            except FileExistsError:
                pass
        update_stamp(origud, ld)
        return ud.localpath

    except bb.fetch2.NetworkAccess:
        raise

    except IOError as e:
        # Fix: os.errno is not a supported/available attribute on Python 3.6+;
        # use the errno module (imported at the top of this file) directly.
        if e.errno == errno.ESTALE:
            # NFS stale handle - worth retrying with a different mirror.
            logger.warning("Stale Error Observed %s." % ud.url)
            return False
        raise

    except bb.fetch2.BBFetchException as e:
        if isinstance(e, ChecksumError):
            logger.warning("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (ud.url, origud.url))
            logger.warning(str(e))
            if os.path.exists(ud.localpath):
                rename_bad_checksum(ud, e.checksum)
        elif isinstance(e, NoChecksumError):
            raise
        else:
            logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
            logger.debug(1, str(e))
            try:
                ud.method.clean(ud, ld)
            except UnboundLocalError:
                pass
        return False
    finally:
        if ud.lockfile and ud.lockfile != origud.lockfile:
            bb.utils.unlockfile(lf)
1041
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001042
def try_mirrors(fetch, d, origud, mirrors, check = False):
    """
    Try to use a mirrored version of the sources.
    This method will be automatically called before the fetchers go.

    d Is a bb.data instance
    uri is the original uri we're trying to download
    mirrors is the list of mirrors we're going to try
    """
    ld = d.createCopy()

    uris, uds = build_mirroruris(origud, mirrors, ld)

    # Walk the candidates in order; the first non-False result (a localpath,
    # a checkstatus value, or None meaning "finished") is returned as-is.
    for uri, mirror_ud in zip(uris, uds):
        result = try_mirror_url(fetch, origud, mirror_ud, ld, check)
        if result != False:
            return result
    return None
1061
def trusted_network(d, url):
    """
    Use a trusted url during download if networking is enabled and
    BB_ALLOWED_NETWORKS is set globally or for a specific recipe.
    Note: modifies SRC_URI & mirrors.
    """
    # With networking globally disabled every URL is considered trusted.
    if d.getVar('BB_NO_NETWORK') == "1":
        return True

    # A per-recipe flag takes precedence over the global variable.
    pkgname = d.expand(d.getVar('PN', False))
    allowed = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False)

    if not allowed:
        allowed = d.getVar('BB_ALLOWED_NETWORKS')

    # Feature not enabled - accept everything.
    if not allowed:
        return True

    scheme, network, path, user, passwd, param = decodeurl(url)

    if not network:
        return True

    # Drop any port suffix and normalise case before comparing hosts.
    netname = network.split(':')[0].lower()

    for pattern in allowed.split(" "):
        pattern = pattern.lower()
        # "*.example.com" matches example.com and any of its subdomains.
        if pattern.startswith("*.") and ("." + netname).endswith(pattern[1:]):
            return True
        if pattern == netname:
            return True

    return False
1097
def srcrev_internal_helper(ud, d, name):
    """
    Return:
    a) a source revision if specified
    b) latest revision if SRCREV="AUTOINC"
    c) None if not specified

    ud    the FetchData for the url being resolved
    d     data store used to look up the SRCREV* variables
    name  the ;name= identifier for multi-source recipes ('' if unnamed)

    Raises FetchError on conflicting or missing revision information.
    """

    srcrev = None
    pn = d.getVar("PN")
    # Most specific variable wins: SRCREV_<name>_pn-<pn>, then SRCREV_<name>,
    # then SRCREV_pn-<pn>, then plain SRCREV.
    attempts = []
    if name != '' and pn:
        attempts.append("SRCREV_%s_pn-%s" % (name, pn))
    if name != '':
        attempts.append("SRCREV_%s" % name)
    if pn:
        attempts.append("SRCREV_pn-%s" % pn)
    attempts.append("SRCREV")

    for a in attempts:
        srcrev = d.getVar(a)
        if srcrev and srcrev != "INVALID":
            break

    if 'rev' in ud.parm and 'tag' in ud.parm:
        raise FetchError("Please specify a ;rev= parameter or a ;tag= parameter in the url %s but not both." % (ud.url))

    if 'rev' in ud.parm or 'tag' in ud.parm:
        if 'rev' in ud.parm:
            parmrev = ud.parm['rev']
        else:
            parmrev = ud.parm['tag']
        if srcrev == "INVALID" or not srcrev:
            return parmrev
        if srcrev != parmrev:
            # Fix: corrected typo "spcify" -> "specify" in the error message.
            raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please specify one valid value" % (srcrev, parmrev))
        return parmrev

    if srcrev == "INVALID" or not srcrev:
        raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url)
    if srcrev == "AUTOINC":
        # Ask the fetcher backend for the SCM's current latest revision.
        srcrev = ud.method.latest_revision(ud, d, name)

    return srcrev
1142
def get_checksum_file_list(d):
    """ Get a list of files checksum in SRC_URI

    Returns the resolved local paths of all local file entries in
    SRC_URI as a space-separated string, each path suffixed with
    ":True"/":False" recording whether the file currently exists.
    """
    # localonly=True limits this to file:// (local.Local) entries.
    fetch = Fetch([], d, cache = False, localonly = True)

    dl_dir = d.getVar('DL_DIR')
    filelist = []
    for u in fetch.urls:
        ud = fetch.ud[u]

        if ud and isinstance(ud.method, local.Local):
            paths = ud.method.localpaths(ud, d)
            for f in paths:
                pth = ud.decodedurl
                if '*' in pth:
                    # Keep the glob pattern itself in the recorded path.
                    f = os.path.join(os.path.abspath(f), pth)
                if f.startswith(dl_dir):
                    # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
                    if os.path.exists(f):
                        bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN'), os.path.basename(f)))
                    else:
                        bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN'), os.path.basename(f)))
                # Record existence alongside the path so the signature changes
                # when a file appears or disappears.
                filelist.append(f + ":" + str(os.path.exists(f)))

    return " ".join(filelist)
1171
def get_file_checksums(filelist, pn):
    """Return the checksums for a list of local files.

    Results are computed on demand and cached by the module-level
    checksum cache; *pn* identifies the recipe for cache bookkeeping.
    """
    return _checksum_cache.get_checksums(filelist, pn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001180
1181
class FetchData(object):
    """
    A class which represents the fetcher state for a given URI.
    """
    def __init__(self, url, d, localonly = False):
        # localpath is the location of a downloaded result. If not set, the file is local.
        self.donestamp = None
        self.needdonestamp = True
        self.localfile = ""
        self.localpath = None
        self.lockfile = None
        self.mirrortarball = None
        self.basename = None
        self.basepath = None
        # Split the (expanded) URL into its components; parm holds the
        # ;key=value parameters as a dict.
        (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(d.expand(url))
        self.date = self.getSRCDate(d)
        self.url = url
        # URL parameters may carry credentials not present in the netloc.
        if not self.user and "user" in self.parm:
            self.user = self.parm["user"]
        if not self.pswd and "pswd" in self.parm:
            self.pswd = self.parm["pswd"]
        self.setup = False

        # Named entries use per-name checksum variable flags
        # (e.g. SRC_URI[foo.md5sum]); unnamed ones use md5sum/sha256sum.
        if "name" in self.parm:
            self.md5_name = "%s.md5sum" % self.parm["name"]
            self.sha256_name = "%s.sha256sum" % self.parm["name"]
        else:
            self.md5_name = "md5sum"
            self.sha256_name = "sha256sum"
        # Expected checksum: URL parameter first, then the SRC_URI varflag,
        # but only for network schemes where checksums make sense.
        if self.md5_name in self.parm:
            self.md5_expected = self.parm[self.md5_name]
        elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3"]:
            self.md5_expected = None
        else:
            self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name)
        if self.sha256_name in self.parm:
            self.sha256_expected = self.parm[self.sha256_name]
        elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3"]:
            self.sha256_expected = None
        else:
            self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)
        self.ignore_checksums = False

        self.names = self.parm.get("name",'default').split(',')

        # Pick the first registered fetcher backend that claims this URL.
        self.method = None
        for m in methods:
            if m.supports(self, d):
                self.method = m
                break

        if not self.method:
            raise NoMethodError(url)

        if localonly and not isinstance(self.method, local.Local):
            raise NonLocalMethod()

        # "proto" is a legacy alias for "protocol"; warn and normalise.
        if self.parm.get("proto", None) and "protocol" not in self.parm:
            logger.warning('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN'))
            self.parm["protocol"] = self.parm.get("proto", None)

        # Backends may set localfile/needdonestamp etc. here, so this must
        # run before the localpath/donestamp logic below.
        if hasattr(self.method, "urldata_init"):
            self.method.urldata_init(self, d)

        if "localpath" in self.parm:
            # if user sets localpath for file, use it instead.
            self.localpath = self.parm["localpath"]
            self.basename = os.path.basename(self.localpath)
        elif self.localfile:
            self.localpath = self.method.localpath(self, d)

        dldir = d.getVar("DL_DIR")

        if not self.needdonestamp:
            return

        # Note: .done and .lock files should always be in DL_DIR whereas localpath may not be.
        if self.localpath and self.localpath.startswith(dldir):
            basepath = self.localpath
        elif self.localpath:
            basepath = dldir + os.sep + os.path.basename(self.localpath)
        elif self.basepath or self.basename:
            basepath = dldir + os.sep + (self.basepath or self.basename)
        else:
            bb.fatal("Can't determine lock path for url %s" % url)

        self.donestamp = basepath + '.done'
        self.lockfile = basepath + '.lock'

    def setup_revisions(self, d):
        """Resolve the source revision for each named entry of this URL."""
        self.revisions = {}
        for name in self.names:
            self.revisions[name] = srcrev_internal_helper(self, d, name)

        # add compatibility code for non name specified case
        if len(self.names) == 1:
            self.revision = self.revisions[self.names[0]]

    def setup_localpath(self, d):
        """Lazily compute localpath via the fetcher backend if not set yet."""
        if not self.localpath:
            self.localpath = self.method.localpath(self, d)

    def getSRCDate(self, d):
        """
        Return the SRC Date for the component

        d the bb.data module
        """
        # URL parameter overrides any datastore setting.
        if "srcdate" in self.parm:
            return self.parm['srcdate']

        pn = d.getVar("PN")

        if pn:
            return d.getVar("SRCDATE_%s" % pn) or d.getVar("SRCDATE") or d.getVar("DATE")

        return d.getVar("SRCDATE") or d.getVar("DATE")
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001299
class FetchMethod(object):
    """Base class for 'fetch'ing data"""

    def __init__(self, urls=None):
        self.urls = []

    def supports(self, urldata, d):
        """
        Check to see if this fetch class supports a given url.
        """
        return 0

    def localpath(self, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        Can also setup variables in urldata for use in go (saving code duplication
        and duplicate code execution)
        """
        return os.path.join(d.getVar("DL_DIR"), urldata.localfile)

    def supports_checksum(self, urldata):
        """
        Is localpath something that can be represented by a checksum?
        """

        # We cannot compute checksums for directories or glob patterns
        if os.path.isdir(urldata.localpath):
            return False
        if urldata.localpath.find("*") != -1:
            return False

        return True

    def recommends_checksum(self, urldata):
        """
        Is the backend on where checksumming is recommended (should warnings
        be displayed if there is no checksum)?
        """
        return False

    def _strip_leading_slashes(self, relpath):
        """
        Remove leading slash as os.path.join can't cope
        """
        while os.path.isabs(relpath):
            relpath = relpath[1:]
        return relpath

    def setUrls(self, urls):
        self.__urls = urls

    def getUrls(self):
        return self.__urls

    urls = property(getUrls, setUrls, None, "Urls property")

    def need_update(self, ud, d):
        """
        Force a fetch, even if localpath exists?
        """
        if os.path.exists(ud.localpath):
            return False
        return True

    def supports_srcrev(self):
        """
        The fetcher supports auto source revisions (SRCREV)
        """
        return False

    def download(self, urldata, d):
        """
        Fetch urls
        Assumes localpath was called first
        """
        # Fix: 'url' was an undefined name here, turning the intended
        # NoMethodError into a NameError; use the URL from urldata.
        raise NoMethodError(urldata.url)

    def unpack(self, urldata, rootdir, data):
        """
        Unpack urldata.localpath into rootdir, choosing an extraction command
        from the file extension. Honours the 'unpack', 'dos', 'extract' and
        'subdir' URL parameters. Raises UnpackError on failure.
        """
        iterate = False
        file = urldata.localpath

        # Localpath can't deal with 'dir/*' entries, so it converts them to '.',
        # but it must be corrected back for local files copying
        if urldata.basename == '*' and file.endswith('/.'):
            file = '%s/%s' % (file.rstrip('/.'), urldata.path)

        try:
            unpack = bb.utils.to_boolean(urldata.parm.get('unpack'), True)
        except ValueError as exc:
            bb.fatal("Invalid value for 'unpack' parameter for %s: %s" %
                     (file, urldata.parm.get('unpack')))

        # For single-file decompressors the output name is the basename
        # without the compression extension, placed in rootdir.
        base, ext = os.path.splitext(file)
        if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz']:
            efile = os.path.join(rootdir, os.path.basename(base))
        else:
            efile = file
        cmd = None

        if unpack:
            if file.endswith('.tar'):
                cmd = 'tar x --no-same-owner -f %s' % file
            elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
                cmd = 'tar xz --no-same-owner -f %s' % file
            elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
                cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
                cmd = 'gzip -dc %s > %s' % (file, efile)
            elif file.endswith('.bz2'):
                cmd = 'bzip2 -dc %s > %s' % (file, efile)
            elif file.endswith('.tar.xz'):
                cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.xz'):
                cmd = 'xz -dc %s > %s' % (file, efile)
            elif file.endswith('.tar.lz'):
                cmd = 'lzip -dc %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.lz'):
                cmd = 'lzip -dc %s > %s' % (file, efile)
            elif file.endswith('.tar.7z'):
                cmd = '7z x -so %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.7z'):
                cmd = '7za x -y %s 1>/dev/null' % file
            elif file.endswith('.zip') or file.endswith('.jar'):
                try:
                    dos = bb.utils.to_boolean(urldata.parm.get('dos'), False)
                except ValueError as exc:
                    bb.fatal("Invalid value for 'dos' parameter for %s: %s" %
                             (file, urldata.parm.get('dos')))
                cmd = 'unzip -q -o'
                if dos:
                    # -a converts DOS line endings in text files.
                    cmd = '%s -a' % cmd
                cmd = "%s '%s'" % (cmd, file)
            elif file.endswith('.rpm') or file.endswith('.srpm'):
                if 'extract' in urldata.parm:
                    unpack_file = urldata.parm.get('extract')
                    cmd = 'rpm2cpio.sh %s | cpio -id %s' % (file, unpack_file)
                    # The extracted file is itself unpacked by a recursive call below.
                    iterate = True
                    iterate_file = unpack_file
                else:
                    cmd = 'rpm2cpio.sh %s | cpio -id' % (file)
            elif file.endswith('.deb') or file.endswith('.ipk'):
                # Locate the data.tar.* member inside the ar archive first.
                output = subprocess.check_output('ar -t %s' % file, preexec_fn=subprocess_setup, shell=True)
                datafile = None
                if output:
                    for line in output.decode().splitlines():
                        if line.startswith('data.tar.'):
                            datafile = line
                            break
                    else:
                        raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar.* file", urldata.url)
                else:
                    raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url)
                cmd = 'ar x %s %s && tar --no-same-owner -xpf %s && rm %s' % (file, datafile, datafile, datafile)

        # If 'subdir' param exists, create a dir and use it as destination for unpack cmd
        if 'subdir' in urldata.parm:
            subdir = urldata.parm.get('subdir')
            if os.path.isabs(subdir):
                # Absolute subdirs must stay inside the unpack root.
                if not os.path.realpath(subdir).startswith(os.path.realpath(rootdir)):
                    raise UnpackError("subdir argument isn't a subdirectory of unpack root %s" % rootdir, urldata.url)
                unpackdir = subdir
            else:
                unpackdir = os.path.join(rootdir, subdir)
            bb.utils.mkdirhier(unpackdir)
        else:
            unpackdir = rootdir

        if not unpack or not cmd:
            # If file == dest, then avoid any copies, as we already put the file into dest!
            dest = os.path.join(unpackdir, os.path.basename(file))
            if file != dest and not (os.path.exists(dest) and os.path.samefile(file, dest)):
                destdir = '.'
                # For file:// entries all intermediate dirs in path must be created at destination
                if urldata.type == "file":
                    # Trailing '/' does a copying to wrong place
                    urlpath = urldata.path.rstrip('/')
                    # Want files places relative to cwd so no leading '/'
                    urlpath = urlpath.lstrip('/')
                    if urlpath.find("/") != -1:
                        destdir = urlpath.rsplit("/", 1)[0] + '/'
                        bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir))
                cmd = 'cp -fpPRH %s %s' % (file, destdir)

        if not cmd:
            return

        # Run the unpack command with the metadata's PATH so helper scripts
        # (e.g. rpm2cpio.sh) from sysroots are found.
        path = data.getVar('PATH')
        if path:
            cmd = "PATH=\"%s\" %s" % (path, cmd)
        bb.note("Unpacking %s to %s/" % (file, unpackdir))
        ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True, cwd=unpackdir)

        if ret != 0:
            raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url)

        if iterate is True:
            # Recurse once to unpack the file extracted from the rpm.
            iterate_urldata = urldata
            iterate_urldata.localpath = "%s/%s" % (rootdir, iterate_file)
            self.unpack(urldata, rootdir, data)

        return

    def clean(self, urldata, d):
        """
        Clean any existing full or partial download
        """
        bb.utils.remove(urldata.localpath)

    def try_premirror(self, urldata, d):
        """
        Should premirrors be used?
        """
        return True

    def checkstatus(self, fetch, urldata, d):
        """
        Check the status of a URL
        Assumes localpath was called first
        """
        # Fix: 'url' was an undefined name here; log the URL from urldata.
        logger.info("URL %s could not be checked for status since no method exists.", urldata.url)
        return True

    def latest_revision(self, ud, d, name):
        """
        Look in the cache for the latest revision, if not present ask the SCM.
        """
        if not hasattr(self, "_latest_revision"):
            # Fix: 'url' was an undefined name here; use ud.url.
            raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url)

        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
        key = self.generate_revision_key(ud, d, name)
        try:
            return revs[key]
        except KeyError:
            # Cache miss: ask the backend and memoise the result.
            revs[key] = rev = self._latest_revision(ud, d, name)
            return rev

    def sortable_revision(self, ud, d, name):
        """Return (autoinc, revision-string) for use in version strings."""
        latest_rev = self._build_revision(ud, d, name)
        return True, str(latest_rev)

    def generate_revision_key(self, ud, d, name):
        """Build the persistent-cache key for this url's latest revision."""
        key = self._revision_key(ud, d, name)
        return "%s-%s" % (key, d.getVar("PN") or "")
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001544
1545class Fetch(object):
1546 def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None):
1547 if localonly and cache:
1548 raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")
1549
1550 if len(urls) == 0:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001551 urls = d.getVar("SRC_URI").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001552 self.urls = urls
1553 self.d = d
1554 self.ud = {}
1555 self.connection_cache = connection_cache
1556
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001557 fn = d.getVar('FILE')
1558 mc = d.getVar('__BBMULTICONFIG') or ""
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001559 if cache and fn and mc + fn in urldata_cache:
1560 self.ud = urldata_cache[mc + fn]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001561
1562 for url in urls:
1563 if url not in self.ud:
1564 try:
1565 self.ud[url] = FetchData(url, d, localonly)
1566 except NonLocalMethod:
1567 if localonly:
1568 self.ud[url] = None
1569 pass
1570
1571 if fn and cache:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001572 urldata_cache[mc + fn] = self.ud
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001573
1574 def localpath(self, url):
1575 if url not in self.urls:
1576 self.ud[url] = FetchData(url, self.d)
1577
1578 self.ud[url].setup_localpath(self.d)
1579 return self.d.expand(self.ud[url].localpath)
1580
1581 def localpaths(self):
1582 """
1583 Return a list of the local filenames, assuming successful fetch
1584 """
1585 local = []
1586
1587 for u in self.urls:
1588 ud = self.ud[u]
1589 ud.setup_localpath(self.d)
1590 local.append(ud.localpath)
1591
1592 return local
1593
1594 def download(self, urls=None):
1595 """
1596 Fetch all urls
1597 """
1598 if not urls:
1599 urls = self.urls
1600
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001601 network = self.d.getVar("BB_NO_NETWORK")
1602 premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY") == "1")
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001603
1604 for u in urls:
1605 ud = self.ud[u]
1606 ud.setup_localpath(self.d)
1607 m = ud.method
1608 localpath = ""
1609
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001610 if ud.lockfile:
1611 lf = bb.utils.lockfile(ud.lockfile)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001612
1613 try:
1614 self.d.setVar("BB_NO_NETWORK", network)
1615
1616 if verify_donestamp(ud, self.d) and not m.need_update(ud, self.d):
1617 localpath = ud.localpath
1618 elif m.try_premirror(ud, self.d):
1619 logger.debug(1, "Trying PREMIRRORS")
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001620 mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001621 localpath = try_mirrors(self, self.d, ud, mirrors, False)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001622 if localpath:
1623 try:
1624 # early checksum verification so that if the checksum of the premirror
1625 # contents mismatch the fetcher can still try upstream and mirrors
1626 update_stamp(ud, self.d)
1627 except ChecksumError as e:
1628 logger.warning("Checksum failure encountered with premirror download of %s - will attempt other sources." % u)
1629 logger.debug(1, str(e))
1630 localpath = ""
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001631
1632 if premirroronly:
1633 self.d.setVar("BB_NO_NETWORK", "1")
1634
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001635 firsterr = None
1636 verified_stamp = verify_donestamp(ud, self.d)
1637 if not localpath and (not verified_stamp or m.need_update(ud, self.d)):
1638 try:
1639 if not trusted_network(self.d, ud.url):
1640 raise UntrustedUrl(ud.url)
1641 logger.debug(1, "Trying Upstream")
1642 m.download(ud, self.d)
1643 if hasattr(m, "build_mirror_data"):
1644 m.build_mirror_data(ud, self.d)
1645 localpath = ud.localpath
1646 # early checksum verify, so that if checksum mismatched,
1647 # fetcher still have chance to fetch from mirror
1648 update_stamp(ud, self.d)
1649
1650 except bb.fetch2.NetworkAccess:
1651 raise
1652
1653 except BBFetchException as e:
1654 if isinstance(e, ChecksumError):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001655 logger.warning("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001656 logger.debug(1, str(e))
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001657 if os.path.exists(ud.localpath):
1658 rename_bad_checksum(ud, e.checksum)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001659 elif isinstance(e, NoChecksumError):
1660 raise
1661 else:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001662 logger.warning('Failed to fetch URL %s, attempting MIRRORS if available' % u)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001663 logger.debug(1, str(e))
1664 firsterr = e
1665 # Remove any incomplete fetch
1666 if not verified_stamp:
1667 m.clean(ud, self.d)
1668 logger.debug(1, "Trying MIRRORS")
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001669 mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001670 localpath = try_mirrors(self, self.d, ud, mirrors)
1671
1672 if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1):
1673 if firsterr:
1674 logger.error(str(firsterr))
1675 raise FetchError("Unable to fetch URL from any source.", u)
1676
1677 update_stamp(ud, self.d)
1678
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001679 except IOError as e:
1680 if e.errno in [os.errno.ESTALE]:
1681 logger.error("Stale Error Observed %s." % u)
1682 raise ChecksumError("Stale Error Detected")
1683
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001684 except BBFetchException as e:
1685 if isinstance(e, ChecksumError):
1686 logger.error("Checksum failure fetching %s" % u)
1687 raise
1688
1689 finally:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001690 if ud.lockfile:
1691 bb.utils.unlockfile(lf)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001692
1693 def checkstatus(self, urls=None):
1694 """
1695 Check all urls exist upstream
1696 """
1697
1698 if not urls:
1699 urls = self.urls
1700
1701 for u in urls:
1702 ud = self.ud[u]
1703 ud.setup_localpath(self.d)
1704 m = ud.method
1705 logger.debug(1, "Testing URL %s", u)
1706 # First try checking uri, u, from PREMIRRORS
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001707 mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001708 ret = try_mirrors(self, self.d, ud, mirrors, True)
1709 if not ret:
1710 # Next try checking from the original uri, u
1711 try:
1712 ret = m.checkstatus(self, ud, self.d)
1713 except:
1714 # Finally, try checking uri, u, from MIRRORS
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001715 mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001716 ret = try_mirrors(self, self.d, ud, mirrors, True)
1717
1718 if not ret:
1719 raise FetchError("URL %s doesn't work" % u, u)
1720
1721 def unpack(self, root, urls=None):
1722 """
1723 Check all urls exist upstream
1724 """
1725
1726 if not urls:
1727 urls = self.urls
1728
1729 for u in urls:
1730 ud = self.ud[u]
1731 ud.setup_localpath(self.d)
1732
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001733 if ud.lockfile:
1734 lf = bb.utils.lockfile(ud.lockfile)
1735
1736 ud.method.unpack(ud, root, self.d)
1737
1738 if ud.lockfile:
1739 bb.utils.unlockfile(lf)
1740
1741 def clean(self, urls=None):
1742 """
1743 Clean files that the fetcher gets or places
1744 """
1745
1746 if not urls:
1747 urls = self.urls
1748
1749 for url in urls:
1750 if url not in self.ud:
1751 self.ud[url] = FetchData(url, d)
1752 ud = self.ud[url]
1753 ud.setup_localpath(self.d)
1754
1755 if not ud.localfile and ud.localpath is None:
1756 continue
1757
1758 if ud.lockfile:
1759 lf = bb.utils.lockfile(ud.lockfile)
1760
1761 ud.method.clean(ud, self.d)
1762 if ud.donestamp:
1763 bb.utils.remove(ud.donestamp)
1764
1765 if ud.lockfile:
1766 bb.utils.unlockfile(lf)
1767
class FetchConnectionCache(object):
    """
    A container that caches open connections, keyed by "host:port".
    """
    def __init__(self):
        self.cache = {}

    def get_connection_name(self, host, port):
        # Canonical cache key for a host/port pair.
        return '%s:%s' % (host, port)

    def add_connection(self, host, port, connection):
        # First registration wins; an existing entry is never replaced.
        self.cache.setdefault(self.get_connection_name(host, port), connection)

    def get_connection(self, host, port):
        # Returns None when nothing is cached for this host/port.
        return self.cache.get(self.get_connection_name(host, port))

    def remove_connection(self, host, port):
        # Close and drop the cached connection, if any.
        key = self.get_connection_name(host, port)
        if key in self.cache:
            self.cache[key].close()
            self.cache.pop(key)

    def close_connections(self):
        # Close and drop every cached connection.
        for key in list(self.cache):
            self.cache[key].close()
            del self.cache[key]
1803
# Fetcher back-end sub-modules.  Imported at the bottom of the module
# because each sub-module imports names (FetchMethod, runfetchcmd, ...)
# from this file.
from . import cvs
from . import git
from . import gitsm
from . import gitannex
from . import local
from . import svn
from . import wget
from . import ssh
from . import sftp
from . import s3
from . import perforce
from . import bzr
from . import hg
from . import osc
from . import repo
from . import clearcase
from . import npm

# Register one instance of each fetcher with the module-level "methods"
# list (defined earlier in this file).  URL handlers are looked up by
# scanning this list in order, so the registration order below matters:
# "local" is first so plain file urls are claimed before network fetchers.
methods.append(local.Local())
methods.append(wget.Wget())
methods.append(svn.Svn())
methods.append(git.Git())
methods.append(gitsm.GitSM())
methods.append(gitannex.GitANNEX())
methods.append(cvs.Cvs())
methods.append(ssh.SSH())
methods.append(sftp.SFTP())
methods.append(s3.S3())
methods.append(perforce.Perforce())
methods.append(bzr.Bzr())
methods.append(hg.Hg())
methods.append(osc.Osc())
methods.append(repo.Repo())
methods.append(clearcase.ClearCase())
methods.append(npm.Npm())