| 1 | # ex:ts=4:sw=4:sts=4:et |
| 2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- |
| 3 | """ |
| 4 | BitBake 'Fetch' implementations |
| 5 | |
| 6 | Classes for obtaining upstream sources for the |
| 7 | BitBake build tools. |
| 8 | """ |
| 9 | |
| 10 | # Copyright (C) 2003, 2004 Chris Larson |
| 11 | # Copyright (C) 2012 Intel Corporation |
| 12 | # |
| 13 | # This program is free software; you can redistribute it and/or modify |
| 14 | # it under the terms of the GNU General Public License version 2 as |
| 15 | # published by the Free Software Foundation. |
| 16 | # |
| 17 | # This program is distributed in the hope that it will be useful, |
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
| 20 | # GNU General Public License for more details. |
| 21 | # |
| 22 | # You should have received a copy of the GNU General Public License along |
| 23 | # with this program; if not, write to the Free Software Foundation, Inc., |
| 24 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. |
| 25 | # |
| 26 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig |
| 27 | |
| 28 | from __future__ import absolute_import |
| 29 | from __future__ import print_function |
| 30 | import os, re |
| 31 | import signal |
| 32 | import glob |
| 33 | import logging |
| 34 | import urllib |
| 35 | import urlparse |
| 36 | import operator |
| 37 | import bb.persist_data, bb.utils |
| 38 | import bb.checksum |
| 39 | from bb import data |
| 40 | import bb.process |
| 41 | import subprocess |
| 42 | |
| 43 | __version__ = "2" |
| 44 | _checksum_cache = bb.checksum.FileChecksumCache() |
| 45 | |
| 46 | logger = logging.getLogger("BitBake.Fetcher") |
| 47 | |
| 48 | try: |
| 49 | import cPickle as pickle |
| 50 | except ImportError: |
| 51 | import pickle |
| 52 | logger.info("Importing cPickle failed. " |
| 53 | "Falling back to a very slow implementation.") |
| 54 | |
| 55 | class BBFetchException(Exception): |
| 56 | """Class all fetch exceptions inherit from""" |
| 57 | def __init__(self, message): |
| 58 | self.msg = message |
| 59 | Exception.__init__(self, message) |
| 60 | |
| 61 | def __str__(self): |
| 62 | return self.msg |
| 63 | |
| 64 | class UntrustedUrl(BBFetchException): |
| 65 | """Exception raised when encountering a host not listed in BB_ALLOWED_NETWORKS""" |
| 66 | def __init__(self, url, message=''): |
| 67 | if message: |
| 68 | msg = message |
| 69 | else: |
| 70 | msg = "The URL: '%s' is not trusted and cannot be used" % url |
| 71 | self.url = url |
| 72 | BBFetchException.__init__(self, msg) |
| 73 | self.args = (url,) |
| 74 | |
| 75 | class MalformedUrl(BBFetchException): |
| 76 | """Exception raised when encountering an invalid url""" |
| 77 | def __init__(self, url, message=''): |
| 78 | if message: |
| 79 | msg = message |
| 80 | else: |
| 81 | msg = "The URL: '%s' is invalid and cannot be interpreted" % url |
| 82 | self.url = url |
| 83 | BBFetchException.__init__(self, msg) |
| 84 | self.args = (url,) |
| 85 | |
| 86 | class FetchError(BBFetchException): |
| 87 | """General fetcher exception when something happens incorrectly""" |
| 88 | def __init__(self, message, url = None): |
| 89 | if url: |
| 90 | msg = "Fetcher failure for URL: '%s'. %s" % (url, message) |
| 91 | else: |
| 92 | msg = "Fetcher failure: %s" % message |
| 93 | self.url = url |
| 94 | BBFetchException.__init__(self, msg) |
| 95 | self.args = (message, url) |
| 96 | |
| 97 | class ChecksumError(FetchError): |
| 98 | """Exception when mismatched checksum encountered""" |
| 99 | def __init__(self, message, url = None, checksum = None): |
| 100 | self.checksum = checksum |
| 101 | FetchError.__init__(self, message, url) |
| 102 | |
| 103 | class NoChecksumError(FetchError): |
| 104 | """Exception when no checksum is specified, but BB_STRICT_CHECKSUM is set""" |
| 105 | |
| 106 | class UnpackError(BBFetchException): |
| 107 | """General fetcher exception when something happens incorrectly when unpacking""" |
| 108 | def __init__(self, message, url): |
| 109 | msg = "Unpack failure for URL: '%s'. %s" % (url, message) |
| 110 | self.url = url |
| 111 | BBFetchException.__init__(self, msg) |
| 112 | self.args = (message, url) |
| 113 | |
| 114 | class NoMethodError(BBFetchException): |
| 115 | """Exception raised when there is no method to obtain a supplied url or set of urls""" |
| 116 | def __init__(self, url): |
| 117 | msg = "Could not find a fetcher which supports the URL: '%s'" % url |
| 118 | self.url = url |
| 119 | BBFetchException.__init__(self, msg) |
| 120 | self.args = (url,) |
| 121 | |
| 122 | class MissingParameterError(BBFetchException): |
| 123 | """Exception raised when a fetch method is missing a critical parameter in the url""" |
| 124 | def __init__(self, missing, url): |
| 125 | msg = "URL: '%s' is missing the required parameter '%s'" % (url, missing) |
| 126 | self.url = url |
| 127 | self.missing = missing |
| 128 | BBFetchException.__init__(self, msg) |
| 129 | self.args = (missing, url) |
| 130 | |
| 131 | class ParameterError(BBFetchException): |
| 132 | """Exception raised when a url cannot be proccessed due to invalid parameters.""" |
| 133 | def __init__(self, message, url): |
| 134 | msg = "URL: '%s' has invalid parameters. %s" % (url, message) |
| 135 | self.url = url |
| 136 | BBFetchException.__init__(self, msg) |
| 137 | self.args = (message, url) |
| 138 | |
| 139 | class NetworkAccess(BBFetchException): |
| 140 | """Exception raised when network access is disabled but it is required.""" |
| 141 | def __init__(self, url, cmd): |
| 142 | msg = "Network access disabled through BB_NO_NETWORK (or set indirectly due to use of BB_FETCH_PREMIRRORONLY) but access requested with command %s (for url %s)" % (cmd, url) |
| 143 | self.url = url |
| 144 | self.cmd = cmd |
| 145 | BBFetchException.__init__(self, msg) |
| 146 | self.args = (url, cmd) |
| 147 | |
| 148 | class NonLocalMethod(Exception): |
| 149 | def __init__(self): |
| 150 | Exception.__init__(self) |
| 151 | |
| 152 | |
| 153 | class URI(object): |
| 154 | """ |
| 155 | A class representing a generic URI, with methods for |
| 156 | accessing the URI components, and stringifies to the |
| 157 | URI. |
| 158 | |
| 159 | It is constructed by calling it with a URI, or setting |
| 160 | the attributes manually: |
| 161 | |
| 162 | uri = URI("http://example.com/") |
| 163 | |
| 164 | uri = URI() |
| 165 | uri.scheme = 'http' |
| 166 | uri.hostname = 'example.com' |
| 167 | uri.path = '/' |
| 168 | |
| 169 | It has the following attributes: |
| 170 | |
| 171 | * scheme (read/write) |
| 172 | * userinfo (authentication information) (read/write) |
| 173 | * username (read/write) |
| 174 | * password (read/write) |
| 175 | |
| 176 | Note, password is deprecated as of RFC 3986. |
| 177 | |
| 178 | * hostname (read/write) |
| 179 | * port (read/write) |
| 180 | * hostport (read only) |
| 181 | "hostname:port", if both are set, otherwise just "hostname" |
| 182 | * path (read/write) |
| 183 | * path_quoted (read/write) |
| 184 | A URI quoted version of path |
| 185 | * params (dict) (read/write) |
| 186 | * query (dict) (read/write) |
| 187 | * relative (bool) (read only) |
| 188 | True if this is a "relative URI", (e.g. file:foo.diff) |
| 189 | |
| 190 | It stringifies to the URI itself. |
| 191 | |
| 192 | Some notes about relative URIs: while it's specified that |
| 193 | a URI beginning with <scheme>:// should either be directly |
| 194 | followed by a hostname or a /, the old URI handling of the |
| 195 | fetch2 library did not conform to this. Therefore, this URI |
| 196 | class has some kludges to make sure that URIs are parsed in |
| 197 | a way conforming to bitbake's current usage. This URI class |
| 198 | supports the following: |
| 199 | |
| 200 | file:relative/path.diff (IETF compliant) |
| 201 | git:relative/path.git (IETF compliant) |
| 202 | git:///absolute/path.git (IETF compliant) |
| 203 | file:///absolute/path.diff (IETF compliant) |
| 204 | |
| 205 | file://relative/path.diff (not IETF compliant) |
| 206 | |
| 207 | But it does not support the following: |
| 208 | |
| 209 | file://hostname/absolute/path.diff (would be IETF compliant) |
| 210 | |
| 211 | Note that the last case only applies to a list of |
| 212 | "whitelisted" schemes (currently only file://) that require |
| 213 | their URIs to not have a network location. |
| 214 | """ |
| 215 | |
| 216 | _relative_schemes = ['file', 'git'] |
| 217 | _netloc_forbidden = ['file'] |
| 218 | |
| 219 | def __init__(self, uri=None): |
| 220 | self.scheme = '' |
| 221 | self.userinfo = '' |
| 222 | self.hostname = '' |
| 223 | self.port = None |
| 224 | self._path = '' |
| 225 | self.params = {} |
| 226 | self.query = {} |
| 227 | self.relative = False |
| 228 | |
| 229 | if not uri: |
| 230 | return |
| 231 | |
| 232 | # We hijack the URL parameters, since the way bitbake uses |
| 233 | # them is not quite RFC compliant. |
| 234 | uri, param_str = (uri.split(";", 1) + [None])[:2] |
| 235 | |
| 236 | urlp = urlparse.urlparse(uri) |
| 237 | self.scheme = urlp.scheme |
| 238 | |
| 239 | reparse = 0 |
| 240 | |
| 241 | # Coerce urlparse to make URI scheme use netloc |
| 242 | if not self.scheme in urlparse.uses_netloc: |
| 243 | urlparse.uses_params.append(self.scheme) |
| 244 | reparse = 1 |
| 245 | |
| 246 | # Make urlparse happy(/ier) by converting local resources |
| 247 | # to RFC compliant URL format. E.g.: |
| 248 | # file://foo.diff -> file:foo.diff |
| 249 | if urlp.scheme in self._netloc_forbidden: |
| 250 | uri = re.sub("(?<=:)//(?!/)", "", uri, 1) |
| 251 | reparse = 1 |
| 252 | |
| 253 | if reparse: |
| 254 | urlp = urlparse.urlparse(uri) |
| 255 | |
| 256 | # Identify if the URI is relative or not |
| 257 | if urlp.scheme in self._relative_schemes and \ |
| 258 | re.compile("^\w+:(?!//)").match(uri): |
| 259 | self.relative = True |
| 260 | |
| 261 | if not self.relative: |
| 262 | self.hostname = urlp.hostname or '' |
| 263 | self.port = urlp.port |
| 264 | |
| 265 | self.userinfo += urlp.username or '' |
| 266 | |
| 267 | if urlp.password: |
| 268 | self.userinfo += ':%s' % urlp.password |
| 269 | |
| 270 | self.path = urllib.unquote(urlp.path) |
| 271 | |
| 272 | if param_str: |
| 273 | self.params = self._param_str_split(param_str, ";") |
| 274 | if urlp.query: |
| 275 | self.query = self._param_str_split(urlp.query, "&") |
| 276 | |
| 277 | def __str__(self): |
| 278 | userinfo = self.userinfo |
| 279 | if userinfo: |
| 280 | userinfo += '@' |
| 281 | |
| 282 | return "%s:%s%s%s%s%s%s" % ( |
| 283 | self.scheme, |
| 284 | '' if self.relative else '//', |
| 285 | userinfo, |
| 286 | self.hostport, |
| 287 | self.path_quoted, |
| 288 | self._query_str(), |
| 289 | self._param_str()) |
| 290 | |
| 291 | def _param_str(self): |
| 292 | return ( |
| 293 | ''.join([';', self._param_str_join(self.params, ";")]) |
| 294 | if self.params else '') |
| 295 | |
| 296 | def _query_str(self): |
| 297 | return ( |
| 298 | ''.join(['?', self._param_str_join(self.query, "&")]) |
| 299 | if self.query else '') |
| 300 | |
| 301 | def _param_str_split(self, string, elmdelim, kvdelim="="): |
| 302 | ret = {} |
| 303 | for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim)]: |
| 304 | ret[k] = v |
| 305 | return ret |
| 306 | |
| 307 | def _param_str_join(self, dict_, elmdelim, kvdelim="="): |
| 308 | return elmdelim.join([kvdelim.join([k, v]) for k, v in dict_.items()]) |
| 309 | |
| 310 | @property |
| 311 | def hostport(self): |
| 312 | if not self.port: |
| 313 | return self.hostname |
| 314 | return "%s:%d" % (self.hostname, self.port) |
| 315 | |
| 316 | @property |
| 317 | def path_quoted(self): |
| 318 | return urllib.quote(self.path) |
| 319 | |
| 320 | @path_quoted.setter |
| 321 | def path_quoted(self, path): |
| 322 | self.path = urllib.unquote(path) |
| 323 | |
| 324 | @property |
| 325 | def path(self): |
| 326 | return self._path |
| 327 | |
| 328 | @path.setter |
| 329 | def path(self, path): |
| 330 | self._path = path |
| 331 | |
| 332 | if re.compile("^/").match(path): |
| 333 | self.relative = False |
| 334 | else: |
| 335 | self.relative = True |
| 336 | |
| 337 | @property |
| 338 | def username(self): |
| 339 | if self.userinfo: |
| 340 | return (self.userinfo.split(":", 1))[0] |
| 341 | return '' |
| 342 | |
| 343 | @username.setter |
| 344 | def username(self, username): |
| 345 | password = self.password |
| 346 | self.userinfo = username |
| 347 | if password: |
| 348 | self.userinfo += ":%s" % password |
| 349 | |
| 350 | @property |
| 351 | def password(self): |
| 352 | if self.userinfo and ":" in self.userinfo: |
| 353 | return (self.userinfo.split(":", 1))[1] |
| 354 | return '' |
| 355 | |
| 356 | @password.setter |
| 357 | def password(self, password): |
| 358 | self.userinfo = "%s:%s" % (self.username, password) |
| 359 | |
| 360 | def decodeurl(url): |
| 361 | """Decodes an URL into the tokens (scheme, network location, path, |
| 362 | user, password, parameters). |
| 363 | """ |
| 364 | |
| 365 | m = re.compile('(?P<type>[^:]*)://((?P<user>[^/]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url) |
| 366 | if not m: |
| 367 | raise MalformedUrl(url) |
| 368 | |
| 369 | type = m.group('type') |
| 370 | location = m.group('location') |
| 371 | if not location: |
| 372 | raise MalformedUrl(url) |
| 373 | user = m.group('user') |
| 374 | parm = m.group('parm') |
| 375 | |
| 376 | locidx = location.find('/') |
| 377 | if locidx != -1 and type.lower() != 'file': |
| 378 | host = location[:locidx] |
| 379 | path = location[locidx:] |
| 380 | else: |
| 381 | host = "" |
| 382 | path = location |
| 383 | if user: |
| 384 | m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user) |
| 385 | if m: |
| 386 | user = m.group('user') |
| 387 | pswd = m.group('pswd') |
| 388 | else: |
| 389 | user = '' |
| 390 | pswd = '' |
| 391 | |
| 392 | p = {} |
| 393 | if parm: |
| 394 | for s in parm.split(';'): |
| 395 | if s: |
| 396 | if not '=' in s: |
| 397 | raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s)) |
| 398 | s1, s2 = s.split('=') |
| 399 | p[s1] = s2 |
| 400 | |
| 401 | return type, host, urllib.unquote(path), user, pswd, p |
| 402 | |
| 403 | def encodeurl(decoded): |
| 404 | """Encodes a URL from tokens (scheme, network location, path, |
| 405 | user, password, parameters). |
| 406 | """ |
| 407 | |
| 408 | type, host, path, user, pswd, p = decoded |
| 409 | |
| 410 | if not path: |
| 411 | raise MissingParameterError('path', "encoded from the data %s" % str(decoded)) |
| 412 | if not type: |
| 413 | raise MissingParameterError('type', "encoded from the data %s" % str(decoded)) |
| 414 | url = '%s://' % type |
| 415 | if user and type != "file": |
| 416 | url += "%s" % user |
| 417 | if pswd: |
| 418 | url += ":%s" % pswd |
| 419 | url += "@" |
| 420 | if host and type != "file": |
| 421 | url += "%s" % host |
| 422 | # Standardise path to ensure comparisons work |
| 423 | while '//' in path: |
| 424 | path = path.replace("//", "/") |
| 425 | url += "%s" % urllib.quote(path) |
| 426 | if p: |
| 427 | for parm in p: |
| 428 | url += ";%s=%s" % (parm, p[parm]) |
| 429 | |
| 430 | return url |
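# A minimal sketch of how the two helpers round-trip a typical entry (the URL
# below is an illustrative example, not from upstream):
#   decodeurl("http://example.com/foo.tar.gz;name=foo")
#       -> ('http', 'example.com', '/foo.tar.gz', '', '', {'name': 'foo'})
#   encodeurl(('http', 'example.com', '/foo.tar.gz', '', '', {'name': 'foo'}))
#       -> "http://example.com/foo.tar.gz;name=foo"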
| 431 | |
| 432 | def uri_replace(ud, uri_find, uri_replace, replacements, d): |
| 433 | if not ud.url or not uri_find or not uri_replace: |
| 434 | logger.error("uri_replace: passed an undefined value, not replacing") |
| 435 | return None |
| 436 | uri_decoded = list(decodeurl(ud.url)) |
| 437 | uri_find_decoded = list(decodeurl(uri_find)) |
| 438 | uri_replace_decoded = list(decodeurl(uri_replace)) |
| 439 | logger.debug(2, "For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded)) |
| 440 | result_decoded = ['', '', '', '', '', {}] |
| 441 | for loc, i in enumerate(uri_find_decoded): |
| 442 | result_decoded[loc] = uri_decoded[loc] |
| 443 | regexp = i |
| 444 | if loc == 0 and regexp and not regexp.endswith("$"): |
| 445 | # Leaving the type unanchored can mean "https" matching "file" can become "files" |
| 446 | # which is clearly undesirable. |
| 447 | regexp += "$" |
| 448 | if loc == 5: |
| 449 | # Handle URL parameters |
| 450 | if i: |
| 451 | # Any specified URL parameters must match |
| 452 | for k in uri_replace_decoded[loc]: |
| 453 | if uri_decoded[loc][k] != uri_replace_decoded[loc][k]: |
| 454 | return None |
| 455 | # Overwrite any specified replacement parameters |
| 456 | for k in uri_replace_decoded[loc]: |
| 457 | for l in replacements: |
| 458 | uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l]) |
| 459 | result_decoded[loc][k] = uri_replace_decoded[loc][k] |
| 460 | elif (re.match(regexp, uri_decoded[loc])): |
| 461 | if not uri_replace_decoded[loc]: |
| 462 | result_decoded[loc] = "" |
| 463 | else: |
| 464 | for k in replacements: |
| 465 | uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k]) |
| 466 | #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc])) |
| 467 | result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc], 1) |
| 468 | if loc == 2: |
| 469 | # Handle path manipulations |
| 470 | basename = None |
| 471 | if uri_decoded[0] != uri_replace_decoded[0] and ud.mirrortarball: |
| 472 | # If the source and destination url types differ, must be a mirrortarball mapping |
| 473 | basename = os.path.basename(ud.mirrortarball) |
| 474 | # Kill parameters, they make no sense for mirror tarballs |
| 475 | uri_decoded[5] = {} |
| 476 | elif ud.localpath and ud.method.supports_checksum(ud): |
| 477 | basename = os.path.basename(ud.localpath) |
| 478 | if basename and not result_decoded[loc].endswith(basename): |
| 479 | result_decoded[loc] = os.path.join(result_decoded[loc], basename) |
| 480 | else: |
| 481 | return None |
| 482 | result = encodeurl(result_decoded) |
| 483 | if result == ud.url: |
| 484 | return None |
| 485 | logger.debug(2, "For url %s returning %s" % (ud.url, result)) |
| 486 | return result |
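# A rough sketch of a typical mirror mapping (the regex and paths are
# illustrative, not from upstream): with uri_find = "http://.*/.*" and
# uri_replace = "file:///local/mirror/", a url of
# "http://example.com/foo.tar.gz" is rewritten to
# "file:///local/mirror/foo.tar.gz" (assuming the fetcher supports checksums,
# so the original file's basename is appended to the replacement path).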
| 487 | |
| 488 | methods = [] |
| 489 | urldata_cache = {} |
| 490 | saved_headrevs = {} |
| 491 | |
| 492 | def fetcher_init(d): |
| 493 | """ |
| 494 | Called to initialize the fetchers once the configuration data is known. |
| 495 | Calls before this must not hit the cache. |
| 496 | """ |
| 497 | # When to drop SCM head revisions is controlled by user policy |
| 498 | srcrev_policy = d.getVar('BB_SRCREV_POLICY', True) or "clear" |
| 499 | if srcrev_policy == "cache": |
| 500 | logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy) |
| 501 | elif srcrev_policy == "clear": |
| 502 | logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy) |
| 503 | revs = bb.persist_data.persist('BB_URI_HEADREVS', d) |
| 504 | try: |
| 505 | bb.fetch2.saved_headrevs = revs.items() |
| 506 | except: |
| 507 | pass |
| 508 | revs.clear() |
| 509 | else: |
| 510 | raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy) |
| 511 | |
| 512 | _checksum_cache.init_cache(d) |
| 513 | |
| 514 | for m in methods: |
| 515 | if hasattr(m, "init"): |
| 516 | m.init(d) |
| 517 | |
| 518 | def fetcher_parse_save(d): |
| 519 | _checksum_cache.save_extras(d) |
| 520 | |
| 521 | def fetcher_parse_done(d): |
| 522 | _checksum_cache.save_merge(d) |
| 523 | |
| 524 | def fetcher_compare_revisions(d): |
| 525 | """ |
| 526 | Compare the revisions in the persistent cache with the current values and |
| 527 | return True/False on whether they've changed. |
| 528 | """ |
| 529 | |
| 530 | data = bb.persist_data.persist('BB_URI_HEADREVS', d).items() |
| 531 | data2 = bb.fetch2.saved_headrevs |
| 532 | |
| 533 | changed = False |
| 534 | for key in data: |
| 535 | if key not in data2 or data2[key] != data[key]: |
| 536 | logger.debug(1, "%s changed", key) |
| 537 | changed = True |
| 538 | return True |
| 539 | else: |
| 540 | logger.debug(2, "%s did not change", key) |
| 541 | return False |
| 542 | |
| 543 | def mirror_from_string(data): |
| 544 | return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ] |
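# Example (the mirror specification below is illustrative): a PREMIRRORS/MIRRORS
# value of "http://.*/.* file:///local/mirror/ \n ftp://.*/.* file:///local/mirror/"
# becomes [['http://.*/.*', 'file:///local/mirror/'],
#          ['ftp://.*/.*', 'file:///local/mirror/']]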
| 545 | |
| 546 | def verify_checksum(ud, d, precomputed={}): |
| 547 | """ |
| 548 | Verify the MD5 and SHA256 checksums for the downloaded source file. |
| 549 | |
| 550 | Raises a FetchError if one or both of the SRC_URI checksums do not match |
| 551 | the downloaded file, or if BB_STRICT_CHECKSUM is set and there are no |
| 552 | checksums specified. |
| 553 | |
| 554 | Returns a dict of checksums that can be stored in a done stamp file and |
| 555 | passed in as precomputed parameter in a later call to avoid re-computing |
| 556 | the checksums from the file. This allows verifying the checksums of the |
| 557 | file against those in the recipe each time, rather than only after |
| 558 | downloading. See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571. |
| 559 | """ |
| 560 | |
| 561 | _MD5_KEY = "md5" |
| 562 | _SHA256_KEY = "sha256" |
| 563 | |
| 564 | if ud.ignore_checksums or not ud.method.supports_checksum(ud): |
| 565 | return {} |
| 566 | |
| 567 | if _MD5_KEY in precomputed: |
| 568 | md5data = precomputed[_MD5_KEY] |
| 569 | else: |
| 570 | md5data = bb.utils.md5_file(ud.localpath) |
| 571 | |
| 572 | if _SHA256_KEY in precomputed: |
| 573 | sha256data = precomputed[_SHA256_KEY] |
| 574 | else: |
| 575 | sha256data = bb.utils.sha256_file(ud.localpath) |
| 576 | |
| 577 | if ud.method.recommends_checksum(ud): |
| 578 | # If strict checking enabled and neither sum defined, raise error |
| 579 | strict = d.getVar("BB_STRICT_CHECKSUM", True) or "0" |
| 580 | if (strict == "1") and not (ud.md5_expected or ud.sha256_expected): |
| 581 | logger.error('No checksum specified for %s, please add at least one to the recipe:\n' |
| 582 | 'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' % |
| 583 | (ud.localpath, ud.md5_name, md5data, |
| 584 | ud.sha256_name, sha256data)) |
| 585 | raise NoChecksumError('Missing SRC_URI checksum', ud.url) |
| 586 | |
| 587 | # Log missing sums so user can more easily add them |
| 588 | if not ud.md5_expected: |
| 589 | logger.warn('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n' |
| 590 | 'SRC_URI[%s] = "%s"', |
| 591 | ud.localpath, ud.md5_name, md5data) |
| 592 | |
| 593 | if not ud.sha256_expected: |
| 594 | logger.warn('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n' |
| 595 | 'SRC_URI[%s] = "%s"', |
| 596 | ud.localpath, ud.sha256_name, sha256data) |
| 597 | |
| 598 | md5mismatch = False |
| 599 | sha256mismatch = False |
| 600 | |
| 601 | if ud.md5_expected != md5data: |
| 602 | md5mismatch = True |
| 603 | |
| 604 | if ud.sha256_expected != sha256data: |
| 605 | sha256mismatch = True |
| 606 | |
| 607 | # We want to alert the user if a checksum is defined in the recipe but |
| 608 | # it does not match. |
| 609 | msg = "" |
| 610 | mismatch = False |
| 611 | if md5mismatch and ud.md5_expected: |
| 612 | msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'md5', md5data, ud.md5_expected) |
| 613 | mismatch = True; |
| 614 | |
| 615 | if sha256mismatch and ud.sha256_expected: |
| 616 | msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'sha256', sha256data, ud.sha256_expected) |
| 617 | mismatch = True; |
| 618 | |
| 619 | if mismatch: |
| 620 | msg = msg + '\nIf this change is expected (e.g. you have upgraded to a new version without updating the checksums) then you can use these lines within the recipe:\nSRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"\nOtherwise you should retry the download and/or check with upstream to determine if the file has become corrupted or otherwise unexpectedly modified.\n' % (ud.md5_name, md5data, ud.sha256_name, sha256data) |
| 621 | |
| 622 | if len(msg): |
| 623 | raise ChecksumError('Checksum mismatch!%s' % msg, ud.url, md5data) |
| 624 | |
| 625 | return { |
| 626 | _MD5_KEY: md5data, |
| 627 | _SHA256_KEY: sha256data |
| 628 | } |
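# For reference, the returned dict has the form (the digests shown are the
# well-known values for a zero-length file):
#   {"md5": "d41d8cd98f00b204e9800998ecf8427e",
#    "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"}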
| 629 | |
| 630 | |
| 631 | def verify_donestamp(ud, d, origud=None): |
| 632 | """ |
| 633 | Check whether the done stamp file has the right checksums (if the fetch |
| 634 | method supports them). If it doesn't, delete the done stamp and force |
| 635 | a re-download. |
| 636 | |
| 637 | Returns True, if the donestamp exists and is valid, False otherwise. When |
| 638 | returning False, any existing done stamps are removed. |
| 639 | """ |
| 640 | if not os.path.exists(ud.donestamp): |
| 641 | return False |
| 642 | |
| 643 | if (not ud.method.supports_checksum(ud) or |
| 644 | (origud and not origud.method.supports_checksum(origud))): |
| 645 | # done stamp exists, checksums not supported; assume the local file is |
| 646 | # current |
| 647 | return True |
| 648 | |
| 649 | if not os.path.exists(ud.localpath): |
| 650 | # done stamp exists, but the downloaded file does not; the done stamp |
| 651 | # must be incorrect, re-trigger the download |
| 652 | bb.utils.remove(ud.donestamp) |
| 653 | return False |
| 654 | |
| 655 | precomputed_checksums = {} |
| 656 | # Only re-use the precomputed checksums if the donestamp is newer than the |
| 657 | # file. Do not rely on the mtime of directories, though. If ud.localpath is |
| 658 | # a directory, there will probably not be any checksums anyway. |
| 659 | if (os.path.isdir(ud.localpath) or |
| 660 | os.path.getmtime(ud.localpath) < os.path.getmtime(ud.donestamp)): |
| 661 | try: |
| 662 | with open(ud.donestamp, "rb") as cachefile: |
| 663 | pickled = pickle.Unpickler(cachefile) |
| 664 | precomputed_checksums.update(pickled.load()) |
| 665 | except Exception as e: |
| 666 | # Avoid the warnings on the upgrade path from empty done stamp |
| 667 | # files to those containing the checksums. |
| 668 | if not isinstance(e, EOFError): |
| 669 | # Ignore errors, they aren't fatal |
| 670 | logger.warn("Couldn't load checksums from donestamp %s: %s " |
| 671 | "(msg: %s)" % (ud.donestamp, type(e).__name__, |
| 672 | str(e))) |
| 673 | |
| 674 | try: |
| 675 | checksums = verify_checksum(ud, d, precomputed_checksums) |
| 676 | # If the cache file did not have the checksums, compute and store them |
| 677 | # as an upgrade path from the previous done stamp file format. |
| 678 | if checksums != precomputed_checksums: |
| 679 | with open(ud.donestamp, "wb") as cachefile: |
| 680 | p = pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL) |
| 681 | p.dump(checksums) |
| 682 | return True |
| 683 | except ChecksumError as e: |
| 684 | # Checksums failed to verify, trigger re-download and remove the |
| 685 | # incorrect stamp file. |
| 686 | logger.warn("Checksum mismatch for local file %s\n" |
| 687 | "Cleaning and trying again." % ud.localpath) |
| 688 | rename_bad_checksum(ud, e.checksum) |
| 689 | bb.utils.remove(ud.donestamp) |
| 690 | return False |
| 691 | |
| 692 | |
| 693 | def update_stamp(ud, d): |
| 694 | """ |
| 695 | The donestamp is a stamp file indicating the whole fetch is done; |
| 696 | this function updates the stamp after verifying the checksum. |
| 697 | """ |
| 698 | if os.path.exists(ud.donestamp): |
| 699 | # Touch the done stamp file to show active use of the download |
| 700 | try: |
| 701 | os.utime(ud.donestamp, None) |
| 702 | except: |
| 703 | # Errors aren't fatal here |
| 704 | pass |
| 705 | else: |
| 706 | checksums = verify_checksum(ud, d) |
| 707 | # Store the checksums for later re-verification against the recipe |
| 708 | with open(ud.donestamp, "wb") as cachefile: |
| 709 | p = pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL) |
| 710 | p.dump(checksums) |
| 711 | |
| 712 | def subprocess_setup(): |
| 713 | # Python installs a SIGPIPE handler by default. This is usually not what |
| 714 | # non-Python subprocesses expect. |
| 715 | # SIGPIPE errors are known issues with gzip/bash |
| 716 | signal.signal(signal.SIGPIPE, signal.SIG_DFL) |
| 717 | |
| 718 | def get_autorev(d): |
| 719 | # only avoid caching the srcrev in the autorev case |
| 720 | if d.getVar('BB_SRCREV_POLICY', True) != "cache": |
| 721 | d.setVar('__BB_DONT_CACHE', '1') |
| 722 | return "AUTOINC" |
| 723 | |
| 724 | def get_srcrev(d, method_name='sortable_revision'): |
| 725 | """ |
| 726 | Return the revision string, usually for use in the version string (PV) of the current package |
| 727 | Most packages usually only have one SCM so we just pass on the call. |
| 728 | In the multi SCM case, we build a value based on SRCREV_FORMAT which must |
| 729 | have been set. |
| 730 | |
| 731 | The idea here is that we put the string "AUTOINC+" into the return value if the revisions are not |
| 732 | incremental; other code is then responsible for turning that into an increasing value (if needed) |
| 733 | |
| 734 | A method_name can be supplied to retrieve an alternatively formatted revision from a fetcher, if |
| 735 | that fetcher provides a method with the given name and the same signature as sortable_revision. |
| 736 | """ |
| 737 | |
| 738 | scms = [] |
| 739 | fetcher = Fetch(d.getVar('SRC_URI', True).split(), d) |
| 740 | urldata = fetcher.ud |
| 741 | for u in urldata: |
| 742 | if urldata[u].method.supports_srcrev(): |
| 743 | scms.append(u) |
| 744 | |
| 745 | if len(scms) == 0: |
| 746 | raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI") |
| 747 | |
| 748 | if len(scms) == 1 and len(urldata[scms[0]].names) == 1: |
| 749 | autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].names[0]) |
| 750 | if len(rev) > 10: |
| 751 | rev = rev[:10] |
| 752 | if autoinc: |
| 753 | return "AUTOINC+" + rev |
| 754 | return rev |
| 755 | |
| 756 | # |
| 757 | # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT |
| 758 | # |
| 759 | format = d.getVar('SRCREV_FORMAT', True) |
| 760 | if not format: |
| 761 | raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.") |
| 762 | |
| 763 | seenautoinc = False |
| 764 | for scm in scms: |
| 765 | ud = urldata[scm] |
| 766 | for name in ud.names: |
| 767 | autoinc, rev = getattr(ud.method, method_name)(ud, d, name) |
| 768 | seenautoinc = seenautoinc or autoinc |
| 769 | if len(rev) > 10: |
| 770 | rev = rev[:10] |
| 771 | format = format.replace(name, rev) |
| 772 | if seenautoinc: |
| 773 | format = "AUTOINC+" + format |
| 774 | |
| 775 | return format |
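# Illustrative multi-SCM case (names and revisions are made up): with two url
# names "machine" and "meta" and SRCREV_FORMAT = "machine_meta", each name in
# the format string is replaced by its (10 character) revision, giving e.g.
# "1a2b3c4d5e_6f7a8b9c0d", and the whole value is prefixed with "AUTOINC+" if
# any of the revisions was obtained via SRCREV = "AUTOINC".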
| 776 | |
| 777 | def localpath(url, d): |
| 778 | fetcher = bb.fetch2.Fetch([url], d) |
| 779 | return fetcher.localpath(url) |
| 780 | |
| 781 | def runfetchcmd(cmd, d, quiet=False, cleanup=None): |
| 782 | """ |
| 783 | Run cmd returning the command output |
| 784 | Raise an error if interrupted or cmd fails |
| 785 | Optionally echo command output to stdout |
| 786 | Optionally remove the files/directories listed in cleanup upon failure |
| 787 | """ |
| 788 | |
| 789 | # Need to export PATH as the binary could be in metadata paths |
| 790 | # rather than host-provided ones. |
| 791 | # Also include some other variables. |
| 792 | # FIXME: Should this really include all exported variables? |
| 793 | exportvars = ['HOME', 'PATH', |
| 794 | 'HTTP_PROXY', 'http_proxy', |
| 795 | 'HTTPS_PROXY', 'https_proxy', |
| 796 | 'FTP_PROXY', 'ftp_proxy', |
| 797 | 'FTPS_PROXY', 'ftps_proxy', |
| 798 | 'NO_PROXY', 'no_proxy', |
| 799 | 'ALL_PROXY', 'all_proxy', |
| 800 | 'GIT_PROXY_COMMAND', |
| 801 | 'GIT_SSL_CAINFO', |
| 802 | 'GIT_SMART_HTTP', |
| 803 | 'SSH_AUTH_SOCK', 'SSH_AGENT_PID', |
| 804 | 'SOCKS5_USER', 'SOCKS5_PASSWD'] |
| 805 | |
| 806 | if not cleanup: |
| 807 | cleanup = [] |
| 808 | |
| 809 | for var in exportvars: |
| 810 | val = d.getVar(var, True) |
| 811 | if val: |
| 812 | cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd) |
| 813 | |
| 814 | logger.debug(1, "Running %s", cmd) |
| 815 | |
| 816 | success = False |
| 817 | error_message = "" |
| 818 | |
| 819 | try: |
| 820 | (output, errors) = bb.process.run(cmd, shell=True, stderr=subprocess.PIPE) |
| 821 | success = True |
| 822 | except bb.process.NotFoundError as e: |
| 823 | error_message = "Fetch command %s" % (e.command) |
| 824 | except bb.process.ExecutionError as e: |
| 825 | if e.stdout: |
| 826 | output = "output:\n%s\n%s" % (e.stdout, e.stderr) |
| 827 | elif e.stderr: |
| 828 | output = "output:\n%s" % e.stderr |
| 829 | else: |
| 830 | output = "no output" |
| 831 | error_message = "Fetch command failed with exit code %s, %s" % (e.exitcode, output) |
| 832 | except bb.process.CmdError as e: |
| 833 | error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg) |
| 834 | if not success: |
| 835 | for f in cleanup: |
| 836 | try: |
| 837 | bb.utils.remove(f, True) |
| 838 | except OSError: |
| 839 | pass |
| 840 | |
| 841 | raise FetchError(error_message) |
| 842 | |
| 843 | return output |
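# Sketch of how the individual fetchers typically call this helper (the command
# shown is an illustrative example only, and assumes 'ud' and 'd' exist):
#   output = runfetchcmd("git ls-remote %s" % ud.url, d, quiet=True)
# On failure a FetchError is raised and anything listed in 'cleanup' is removed.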
| 844 | |
| 845 | def check_network_access(d, info = "", url = None): |
| 846 | """ |
| 847 | log remote network access, and error if BB_NO_NETWORK is set |
| 848 | """ |
| 849 | if d.getVar("BB_NO_NETWORK", True) == "1": |
| 850 | raise NetworkAccess(url, info) |
| 851 | else: |
| 852 | logger.debug(1, "Fetcher accessed the network with the command %s" % info) |
| 853 | |
| 854 | def build_mirroruris(origud, mirrors, ld): |
| 855 | uris = [] |
| 856 | uds = [] |
| 857 | |
| 858 | replacements = {} |
| 859 | replacements["TYPE"] = origud.type |
| 860 | replacements["HOST"] = origud.host |
| 861 | replacements["PATH"] = origud.path |
| 862 | replacements["BASENAME"] = origud.path.split("/")[-1] |
| 863 | replacements["MIRRORNAME"] = origud.host.replace(':','.') + origud.path.replace('/', '.').replace('*', '.') |
| 864 | |
| 865 | def adduri(ud, uris, uds, mirrors): |
| 866 | for line in mirrors: |
| 867 | try: |
| 868 | (find, replace) = line |
| 869 | except ValueError: |
| 870 | continue |
| 871 | newuri = uri_replace(ud, find, replace, replacements, ld) |
| 872 | if not newuri or newuri in uris or newuri == origud.url: |
| 873 | continue |
| 874 | |
| 875 | if not trusted_network(ld, newuri): |
| 876 | logger.debug(1, "Mirror %s not in the list of trusted networks, skipping" % (newuri)) |
| 877 | continue |
| 878 | |
| 879 | # Create a local copy of the mirrors minus the current line; |
| 880 | # this prevents us from recursively processing the same line |
| 881 | # as well as indirect recursion A -> B -> C -> A. |
| 882 | localmirrors = list(mirrors) |
| 883 | localmirrors.remove(line) |
| 884 | |
| 885 | try: |
| 886 | newud = FetchData(newuri, ld) |
| 887 | newud.setup_localpath(ld) |
| 888 | except bb.fetch2.BBFetchException as e: |
| 889 | logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url)) |
| 890 | logger.debug(1, str(e)) |
| 891 | try: |
| 892 | # setup_localpath of file:// urls may fail; we should still see |
| 893 | # if mirrors of the url exist |
| 894 | adduri(newud, uris, uds, localmirrors) |
| 895 | except UnboundLocalError: |
| 896 | pass |
| 897 | continue |
| 898 | uris.append(newuri) |
| 899 | uds.append(newud) |
| 900 | |
| 901 | adduri(newud, uris, uds, localmirrors) |
| 902 | |
| 903 | adduri(origud, uris, uds, mirrors) |
| 904 | |
| 905 | return uris, uds |
| 906 | |
| 907 | def rename_bad_checksum(ud, suffix): |
| 908 | """ |
| 909 | Rename the local file to append the given suffix |
| 910 | """ |
| 911 | |
| 912 | if ud.localpath is None: |
| 913 | return |
| 914 | |
| 915 | new_localpath = "%s_bad-checksum_%s" % (ud.localpath, suffix) |
| 916 | bb.warn("Renaming %s to %s" % (ud.localpath, new_localpath)) |
| 917 | bb.utils.movefile(ud.localpath, new_localpath) |
| 918 | |
| 919 | |
| 920 | def try_mirror_url(fetch, origud, ud, ld, check = False): |
| 921 | # Return of None or a value means we're finished |
| 922 | # False means try another url |
| 923 | try: |
| 924 | if check: |
| 925 | found = ud.method.checkstatus(fetch, ud, ld) |
| 926 | if found: |
| 927 | return found |
| 928 | return False |
| 929 | |
| 930 | os.chdir(ld.getVar("DL_DIR", True)) |
| 931 | |
| 932 | if not verify_donestamp(ud, ld, origud) or ud.method.need_update(ud, ld): |
| 933 | ud.method.download(ud, ld) |
| 934 | if hasattr(ud.method,"build_mirror_data"): |
| 935 | ud.method.build_mirror_data(ud, ld) |
| 936 | |
| 937 | if not ud.localpath or not os.path.exists(ud.localpath): |
| 938 | return False |
| 939 | |
| 940 | if ud.localpath == origud.localpath: |
| 941 | return ud.localpath |
| 942 | |
| 943 | # We may be obtaining a mirror tarball which needs further processing by the real fetcher |
| 944 | # If that tarball is a local file:// we need to provide a symlink to it |
| 945 | dldir = ld.getVar("DL_DIR", True) |
| 946 | if origud.mirrortarball and os.path.basename(ud.localpath) == os.path.basename(origud.mirrortarball) \ |
| 947 | and os.path.basename(ud.localpath) != os.path.basename(origud.localpath): |
| 948 | # Create donestamp in old format to avoid triggering a re-download |
| 949 | bb.utils.mkdirhier(os.path.dirname(ud.donestamp)) |
| 950 | open(ud.donestamp, 'w').close() |
| 951 | dest = os.path.join(dldir, os.path.basename(ud.localpath)) |
| 952 | if not os.path.exists(dest): |
| 953 | os.symlink(ud.localpath, dest) |
| 954 | if not verify_donestamp(origud, ld) or origud.method.need_update(origud, ld): |
| 955 | origud.method.download(origud, ld) |
| 956 | if hasattr(origud.method,"build_mirror_data"): |
| 957 | origud.method.build_mirror_data(origud, ld) |
| 958 | return ud.localpath |
| 959 | # Otherwise the result is a local file:// and we symlink to it |
| 960 | if not os.path.exists(origud.localpath): |
| 961 | if os.path.islink(origud.localpath): |
| 962 | # Broken symbolic link |
| 963 | os.unlink(origud.localpath) |
| 964 | |
| 965 | os.symlink(ud.localpath, origud.localpath) |
| 966 | update_stamp(origud, ld) |
| 967 | return ud.localpath |
| 968 | |
| 969 | except bb.fetch2.NetworkAccess: |
| 970 | raise |
| 971 | |
| 972 | except bb.fetch2.BBFetchException as e: |
| 973 | if isinstance(e, ChecksumError): |
| 974 | logger.warn("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (ud.url, origud.url)) |
| 975 | logger.warn(str(e)) |
| 976 | rename_bad_checksum(ud, e.checksum) |
| 977 | elif isinstance(e, NoChecksumError): |
| 978 | raise |
| 979 | else: |
| 980 | logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url)) |
| 981 | logger.debug(1, str(e)) |
| 982 | try: |
| 983 | ud.method.clean(ud, ld) |
| 984 | except UnboundLocalError: |
| 985 | pass |
| 986 | return False |
| 987 | |
| 988 | def try_mirrors(fetch, d, origud, mirrors, check = False): |
| 989 | """ |
| 990 | Try to use a mirrored version of the sources. |
| 991 | This method will be automatically called before the fetchers go. |
| 992 | |
| 993 | d is a bb.data instance |
| 994 | origud is the FetchData for the original uri we're trying to download |
| 995 | mirrors is the list of mirrors we're going to try |
| 996 | """ |
| 997 | ld = d.createCopy() |
| 998 | |
| 999 | uris, uds = build_mirroruris(origud, mirrors, ld) |
| 1000 | |
| 1001 | for index, uri in enumerate(uris): |
| 1002 | ret = try_mirror_url(fetch, origud, uds[index], ld, check) |
| 1003 | if ret != False: |
| 1004 | return ret |
| 1005 | return None |
| 1006 | |
| 1007 | def trusted_network(d, url): |
| 1008 | """ |
| 1009 | Check whether the host of the given url is trusted. Returns True if |
| 1010 | networking is disabled, if BB_ALLOWED_NETWORKS is unset (globally and |
| 1011 | for the recipe), or if the url's host matches an allowed network. |
| 1012 | """ |
| 1013 | if d.getVar('BB_NO_NETWORK', True) == "1": |
| 1014 | return True |
| 1015 | |
| 1016 | pkgname = d.expand(d.getVar('PN', False)) |
| 1017 | trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname) |
| 1018 | |
| 1019 | if not trusted_hosts: |
| 1020 | trusted_hosts = d.getVar('BB_ALLOWED_NETWORKS', True) |
| 1021 | |
| 1022 | # Not enabled. |
| 1023 | if not trusted_hosts: |
| 1024 | return True |
| 1025 | |
| 1026 | scheme, network, path, user, passwd, param = decodeurl(url) |
| 1027 | |
| 1028 | if not network: |
| 1029 | return True |
| 1030 | |
| 1031 | network = network.lower() |
| 1032 | |
| 1033 | for host in trusted_hosts.split(" "): |
| 1034 | host = host.lower() |
| 1035 | if host.startswith("*.") and ("." + network).endswith(host[1:]): |
| 1036 | return True |
| 1037 | if host == network: |
| 1038 | return True |
| 1039 | |
| 1040 | return False |
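# Illustrative behaviour (hostnames are made up): with
# BB_ALLOWED_NETWORKS = "*.example.com git.openembedded.org"
#   trusted_network(d, "git://git.example.com/repo.git")   -> True  (wildcard match)
#   trusted_network(d, "http://downloads.elsewhere.org/x") -> False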
| 1041 | |
| 1042 | def srcrev_internal_helper(ud, d, name): |
| 1043 | """ |
| 1044 | Return: |
| 1045 | a) a source revision if specified |
| 1046 | b) latest revision if SRCREV="AUTOINC" |
| 1047 | c) None if not specified |
| 1048 | """ |
| 1049 | |
| 1050 | srcrev = None |
| 1051 | pn = d.getVar("PN", True) |
| 1052 | attempts = [] |
| 1053 | if name != '' and pn: |
| 1054 | attempts.append("SRCREV_%s_pn-%s" % (name, pn)) |
| 1055 | if name != '': |
| 1056 | attempts.append("SRCREV_%s" % name) |
| 1057 | if pn: |
| 1058 | attempts.append("SRCREV_pn-%s" % pn) |
| 1059 | attempts.append("SRCREV") |
| 1060 | |
| 1061 | for a in attempts: |
| 1062 | srcrev = d.getVar(a, True) |
| 1063 | if srcrev and srcrev != "INVALID": |
| 1064 | break |
| 1065 | |
| 1066 | if 'rev' in ud.parm and 'tag' in ud.parm: |
| 1067 | raise FetchError("Please specify a ;rev= parameter or a ;tag= parameter in the url %s but not both." % (ud.url)) |
| 1068 | |
| 1069 | if 'rev' in ud.parm or 'tag' in ud.parm: |
| 1070 | if 'rev' in ud.parm: |
| 1071 | parmrev = ud.parm['rev'] |
| 1072 | else: |
| 1073 | parmrev = ud.parm['tag'] |
| 1074 | if srcrev == "INVALID" or not srcrev: |
| 1075 | return parmrev |
| 1076 | if srcrev != parmrev: |
| 1077 | raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please spcify one valid value" % (srcrev, parmrev)) |
| 1078 | return parmrev |
| 1079 | |
| 1080 | if srcrev == "INVALID" or not srcrev: |
| 1081 | raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url) |
| 1082 | if srcrev == "AUTOINC": |
| 1083 | srcrev = ud.method.latest_revision(ud, d, name) |
| 1084 | |
| 1085 | return srcrev |
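# Lookup order sketch (recipe and name values are illustrative): for a url name
# of "meta" in a recipe with PN "linux-yocto", the variables are tried in order:
#   SRCREV_meta_pn-linux-yocto, SRCREV_meta, SRCREV_pn-linux-yocto, SRCREV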
| 1086 | |
| 1087 | def get_checksum_file_list(d): |
| 1088 | """ Get a list of files checksum in SRC_URI |
| 1089 | |
| 1090 | Returns the resolved local paths of all local file entries in |
| 1091 | SRC_URI as a space-separated string |
| 1092 | """ |
| 1093 | fetch = Fetch([], d, cache = False, localonly = True) |
| 1094 | |
| 1095 | dl_dir = d.getVar('DL_DIR', True) |
| 1096 | filelist = [] |
| 1097 | for u in fetch.urls: |
| 1098 | ud = fetch.ud[u] |
| 1099 | |
| 1100 | if ud and isinstance(ud.method, local.Local): |
| 1101 | paths = ud.method.localpaths(ud, d) |
| 1102 | for f in paths: |
| 1103 | pth = ud.decodedurl |
| 1104 | if '*' in pth: |
| 1105 | f = os.path.join(os.path.abspath(f), pth) |
| 1106 | if f.startswith(dl_dir): |
| 1107 | # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else |
| 1108 | if os.path.exists(f): |
| 1109 | bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN', True), os.path.basename(f))) |
| 1110 | else: |
| 1111 | bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN', True), os.path.basename(f))) |
| 1112 | filelist.append(f + ":" + str(os.path.exists(f))) |
| 1113 | |
| 1114 | return " ".join(filelist) |
| 1115 | |
| 1116 | def get_file_checksums(filelist, pn): |
| 1117 | """Get a list of the checksums for a list of local files |
| 1118 | |
| 1119 | Returns the checksums for a list of local files, caching the results as |
| 1120 | it proceeds |
| 1121 | |
| 1122 | """ |
| 1123 | |
| 1124 | def checksum_file(f): |
| 1125 | try: |
| 1126 | checksum = _checksum_cache.get_checksum(f) |
| 1127 | except OSError as e: |
| 1128 | bb.warn("Unable to get checksum for %s SRC_URI entry %s: %s" % (pn, os.path.basename(f), e)) |
| 1129 | return None |
| 1130 | return checksum |
| 1131 | |
| 1132 | def checksum_dir(pth): |
| 1133 | # Handle directories recursively |
| 1134 | dirchecksums = [] |
| 1135 | for root, dirs, files in os.walk(pth): |
| 1136 | for name in files: |
| 1137 | fullpth = os.path.join(root, name) |
| 1138 | checksum = checksum_file(fullpth) |
| 1139 | if checksum: |
| 1140 | dirchecksums.append((fullpth, checksum)) |
| 1141 | return dirchecksums |
| 1142 | |
| 1143 | checksums = [] |
| 1144 | for pth in filelist.split(): |
| 1145 | exist = pth.split(":")[1] |
| 1146 | if exist == "False": |
| 1147 | continue |
| 1148 | pth = pth.split(":")[0] |
| 1149 | if '*' in pth: |
| 1150 | # Handle globs |
| 1151 | for f in glob.glob(pth): |
| 1152 | if os.path.isdir(f): |
| 1153 | checksums.extend(checksum_dir(f)) |
| 1154 | else: |
| 1155 | checksum = checksum_file(f) |
| 1156 | checksums.append((f, checksum)) |
| 1157 | elif os.path.isdir(pth): |
| 1158 | checksums.extend(checksum_dir(pth)) |
| 1159 | else: |
| 1160 | checksum = checksum_file(pth) |
| 1161 | checksums.append((pth, checksum)) |
| 1162 | |
| 1163 | checksums.sort(key=operator.itemgetter(1)) |
| 1164 | return checksums |
| 1165 | |
| 1166 | |
| 1167 | class FetchData(object): |
| 1168 | """ |
| 1169 | A class which represents the fetcher state for a given URI. |
| 1170 | """ |
| 1171 | def __init__(self, url, d, localonly = False): |
| 1172 | # localpath is the location of a downloaded result. If not set, the file is local. |
| 1173 | self.donestamp = None |
| 1174 | self.localfile = "" |
| 1175 | self.localpath = None |
| 1176 | self.lockfile = None |
| 1177 | self.mirrortarball = None |
| 1178 | self.basename = None |
| 1179 | self.basepath = None |
| 1180 | (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d)) |
| 1181 | self.date = self.getSRCDate(d) |
| 1182 | self.url = url |
| 1183 | if not self.user and "user" in self.parm: |
| 1184 | self.user = self.parm["user"] |
| 1185 | if not self.pswd and "pswd" in self.parm: |
| 1186 | self.pswd = self.parm["pswd"] |
| 1187 | self.setup = False |
| 1188 | |
| 1189 | if "name" in self.parm: |
| 1190 | self.md5_name = "%s.md5sum" % self.parm["name"] |
| 1191 | self.sha256_name = "%s.sha256sum" % self.parm["name"] |
| 1192 | else: |
| 1193 | self.md5_name = "md5sum" |
| 1194 | self.sha256_name = "sha256sum" |
| 1195 | if self.md5_name in self.parm: |
| 1196 | self.md5_expected = self.parm[self.md5_name] |
| 1197 | elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]: |
| 1198 | self.md5_expected = None |
| 1199 | else: |
| 1200 | self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name) |
| 1201 | if self.sha256_name in self.parm: |
| 1202 | self.sha256_expected = self.parm[self.sha256_name] |
| 1203 | elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]: |
| 1204 | self.sha256_expected = None |
| 1205 | else: |
| 1206 | self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name) |
| 1207 | self.ignore_checksums = False |
| 1208 | |
| 1209 | self.names = self.parm.get("name",'default').split(',') |
| 1210 | |
| 1211 | self.method = None |
| 1212 | for m in methods: |
| 1213 | if m.supports(self, d): |
| 1214 | self.method = m |
| 1215 | break |
| 1216 | |
| 1217 | if not self.method: |
| 1218 | raise NoMethodError(url) |
| 1219 | |
| 1220 | if localonly and not isinstance(self.method, local.Local): |
| 1221 | raise NonLocalMethod() |
| 1222 | |
| 1223 | if self.parm.get("proto", None) and "protocol" not in self.parm: |
| 1224 | logger.warn('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN', True)) |
| 1225 | self.parm["protocol"] = self.parm.get("proto", None) |
| 1226 | |
| 1227 | if hasattr(self.method, "urldata_init"): |
| 1228 | self.method.urldata_init(self, d) |
| 1229 | |
| 1230 | if "localpath" in self.parm: |
| 1231 | # if user sets localpath for file, use it instead. |
| 1232 | self.localpath = self.parm["localpath"] |
| 1233 | self.basename = os.path.basename(self.localpath) |
| 1234 | elif self.localfile: |
| 1235 | self.localpath = self.method.localpath(self, d) |
| 1236 | |
| 1237 | dldir = d.getVar("DL_DIR", True) |
| 1238 | # Note: .done and .lock files should always be in DL_DIR whereas localpath may not be. |
| 1239 | if self.localpath and self.localpath.startswith(dldir): |
| 1240 | basepath = self.localpath |
| 1241 | elif self.localpath: |
| 1242 | basepath = dldir + os.sep + os.path.basename(self.localpath) |
| 1243 | else: |
| 1244 | basepath = dldir + os.sep + (self.basepath or self.basename) |
| 1245 | self.donestamp = basepath + '.done' |
| 1246 | self.lockfile = basepath + '.lock' |
| 1247 | |
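# Illustrative mapping (the recipe snippet is an example, not from upstream):
#   SRC_URI = "http://example.com/foo-1.0.tar.gz;name=foo"
#   SRC_URI[foo.md5sum] = "<md5 hex digest>"
#   SRC_URI[foo.sha256sum] = "<sha256 hex digest>"
# yields a FetchData with names == ['foo'], md5_name == 'foo.md5sum' and the
# expected checksums read from the SRC_URI varflags shown above.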
| 1248 | def setup_revisons(self, d): |
| 1249 | self.revisions = {} |
| 1250 | for name in self.names: |
| 1251 | self.revisions[name] = srcrev_internal_helper(self, d, name) |
| 1252 | |
| 1253 | # add compatibility code for the case where no name is specified |
| 1254 | if len(self.names) == 1: |
| 1255 | self.revision = self.revisions[self.names[0]] |
| 1256 | |
| 1257 | def setup_localpath(self, d): |
| 1258 | if not self.localpath: |
| 1259 | self.localpath = self.method.localpath(self, d) |
| 1260 | |
| 1261 | def getSRCDate(self, d): |
| 1262 | """ |
| 1263 | Return the SRC Date for the component |
| 1264 | |
| 1265 | d the bb.data module |
| 1266 | """ |
| 1267 | if "srcdate" in self.parm: |
| 1268 | return self.parm['srcdate'] |
| 1269 | |
| 1270 | pn = d.getVar("PN", True) |
| 1271 | |
| 1272 | if pn: |
| 1273 | return d.getVar("SRCDATE_%s" % pn, True) or d.getVar("SRCDATE", True) or d.getVar("DATE", True) |
| 1274 | |
| 1275 | return d.getVar("SRCDATE", True) or d.getVar("DATE", True) |
| 1276 | |
| 1277 | class FetchMethod(object): |
| 1278 | """Base class for 'fetch'ing data""" |
| 1279 | |
| 1280 | def __init__(self, urls=None): |
| 1281 | self.urls = [] |
| 1282 | |
| 1283 | def supports(self, urldata, d): |
| 1284 | """ |
| 1285 | Check to see if this fetch class supports a given url. |
| 1286 | """ |
| 1287 | return 0 |
| 1288 | |
| 1289 | def localpath(self, urldata, d): |
| 1290 | """ |
| 1291 | Return the local filename of a given url assuming a successful fetch. |
| 1292 | Can also set up variables in urldata for use in download() (saving code duplication |
| 1293 | and duplicate code execution) |
| 1294 | """ |
| 1295 | return os.path.join(data.getVar("DL_DIR", d, True), urldata.localfile) |
| 1296 | |
| 1297 | def supports_checksum(self, urldata): |
| 1298 | """ |
| 1299 | Is localpath something that can be represented by a checksum? |
| 1300 | """ |
| 1301 | |
| 1302 | # We cannot compute checksums for directories |
| 1303 | if os.path.isdir(urldata.localpath): |
| 1304 | return False |
| 1305 | if urldata.localpath.find("*") != -1: |
| 1306 | return False |
| 1307 | |
| 1308 | return True |
| 1309 | |
| 1310 | def recommends_checksum(self, urldata): |
| 1311 | """ |
| 1312 | Is this backend one where checksumming is recommended (should warnings |
| 1313 | be displayed if there is no checksum)? |
| 1314 | """ |
| 1315 | return False |
| 1316 | |
| 1317 | def _strip_leading_slashes(self, relpath): |
| 1318 | """ |
| 1319 | Remove leading slash as os.path.join can't cope |
| 1320 | """ |
| 1321 | while os.path.isabs(relpath): |
| 1322 | relpath = relpath[1:] |
| 1323 | return relpath |
| 1324 | |
| 1325 | def setUrls(self, urls): |
| 1326 | self.__urls = urls |
| 1327 | |
| 1328 | def getUrls(self): |
| 1329 | return self.__urls |
| 1330 | |
| 1331 | urls = property(getUrls, setUrls, None, "Urls property") |
| 1332 | |
| 1333 | def need_update(self, ud, d): |
| 1334 | """ |
| 1335 | Force a fetch, even if localpath exists? |
| 1336 | """ |
| 1337 | if os.path.exists(ud.localpath): |
| 1338 | return False |
| 1339 | return True |
| 1340 | |
| 1341 | def supports_srcrev(self): |
| 1342 | """ |
| 1343 | The fetcher supports auto source revisions (SRCREV) |
| 1344 | """ |
| 1345 | return False |
| 1346 | |
| 1347 | def download(self, urldata, d): |
| 1348 | """ |
| 1349 | Fetch urls |
| 1350 | Assumes localpath was called first |
| 1351 | """ |
| 1352 | raise NoMethodError(urldata.url) |
| 1353 | |
| 1354 | def unpack(self, urldata, rootdir, data): |
| 1355 | iterate = False |
| 1356 | file = urldata.localpath |
| 1357 | |
| 1358 | try: |
| 1359 | unpack = bb.utils.to_boolean(urldata.parm.get('unpack'), True) |
| 1360 | except ValueError as exc: |
| 1361 | bb.fatal("Invalid value for 'unpack' parameter for %s: %s" % |
| 1362 | (file, urldata.parm.get('unpack'))) |
| 1363 | |
| 1364 | base, ext = os.path.splitext(file) |
| 1365 | if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz']: |
| 1366 | efile = os.path.join(rootdir, os.path.basename(base)) |
| 1367 | else: |
| 1368 | efile = file |
| 1369 | cmd = None |
| 1370 | |
| 1371 | if unpack: |
| 1372 | if file.endswith('.tar'): |
| 1373 | cmd = 'tar x --no-same-owner -f %s' % file |
| 1374 | elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'): |
| 1375 | cmd = 'tar xz --no-same-owner -f %s' % file |
| 1376 | elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'): |
| 1377 | cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file |
| 1378 | elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'): |
| 1379 | cmd = 'gzip -dc %s > %s' % (file, efile) |
| 1380 | elif file.endswith('.bz2'): |
| 1381 | cmd = 'bzip2 -dc %s > %s' % (file, efile) |
| 1382 | elif file.endswith('.tar.xz'): |
| 1383 | cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file |
| 1384 | elif file.endswith('.xz'): |
| 1385 | cmd = 'xz -dc %s > %s' % (file, efile) |
| 1386 | elif file.endswith('.tar.lz'): |
| 1387 | cmd = 'lzip -dc %s | tar x --no-same-owner -f -' % file |
| 1388 | elif file.endswith('.lz'): |
| 1389 | cmd = 'lzip -dc %s > %s' % (file, efile) |
| 1390 | elif file.endswith('.zip') or file.endswith('.jar'): |
| 1391 | try: |
| 1392 | dos = bb.utils.to_boolean(urldata.parm.get('dos'), False) |
| 1393 | except ValueError as exc: |
| 1394 | bb.fatal("Invalid value for 'dos' parameter for %s: %s" % |
| 1395 | (file, urldata.parm.get('dos'))) |
| 1396 | cmd = 'unzip -q -o' |
| 1397 | if dos: |
| 1398 | cmd = '%s -a' % cmd |
| 1399 | cmd = "%s '%s'" % (cmd, file) |
| 1400 | elif file.endswith('.rpm') or file.endswith('.srpm'): |
| 1401 | if 'extract' in urldata.parm: |
| 1402 | unpack_file = urldata.parm.get('extract') |
| 1403 | cmd = 'rpm2cpio.sh %s | cpio -id %s' % (file, unpack_file) |
| 1404 | iterate = True |
| 1405 | iterate_file = unpack_file |
| 1406 | else: |
| 1407 | cmd = 'rpm2cpio.sh %s | cpio -id' % (file) |
| 1408 | elif file.endswith('.deb') or file.endswith('.ipk'): |
| 1409 | cmd = 'ar -p %s data.tar.gz | zcat | tar --no-same-owner -xpf -' % file |
| 1410 | |
| 1411 | if not unpack or not cmd: |
| 1412 | # If file == dest, then avoid any copies, as we already put the file into dest! |
| 1413 | dest = os.path.join(rootdir, os.path.basename(file)) |
| 1414 | if (file != dest) and not (os.path.exists(dest) and os.path.samefile(file, dest)): |
| 1415 | if os.path.isdir(file): |
| 1416 | # If for example we're asked to copy file://foo/bar, we need to unpack the result into foo/bar |
| 1417 | basepath = getattr(urldata, "basepath", None) |
| 1418 | destdir = "." |
| 1419 | if basepath and basepath.endswith("/"): |
| 1420 | basepath = basepath.rstrip("/") |
| 1421 | elif basepath: |
| 1422 | basepath = os.path.dirname(basepath) |
| 1423 | if basepath and basepath.find("/") != -1: |
| 1424 | destdir = basepath[:basepath.rfind('/')] |
| 1425 | destdir = destdir.strip('/') |
| 1426 | if destdir != "." and not os.access("%s/%s" % (rootdir, destdir), os.F_OK): |
| 1427 | os.makedirs("%s/%s" % (rootdir, destdir)) |
| 1428 | cmd = 'cp -fpPR %s %s/%s/' % (file, rootdir, destdir) |
| 1429 | #cmd = 'tar -cf - -C "%d" -ps . | tar -xf - -C "%s/%s/"' % (file, rootdir, destdir) |
| 1430 | else: |
| 1431 | # The "destdir" handling was specifically done for FILESPATH |
| 1432 | # items. So, only do so for file:// entries. |
| 1433 | if urldata.type == "file" and urldata.path.find("/") != -1: |
| 1434 | destdir = urldata.path.rsplit("/", 1)[0] |
| 1435 | if urldata.parm.get('subdir') is not None:
| 1436 | destdir = urldata.parm.get('subdir') + "/" + destdir |
| 1437 | else: |
| 1438 | if urldata.parm.get('subdir') is not None:
| 1439 | destdir = urldata.parm.get('subdir') |
| 1440 | else: |
| 1441 | destdir = "." |
| 1442 | bb.utils.mkdirhier("%s/%s" % (rootdir, destdir)) |
| 1443 | cmd = 'cp -f %s %s/%s/' % (file, rootdir, destdir) |
| 1444 | |
| 1445 | if not cmd: |
| 1446 | return |
| 1447 | |
| 1448 | # Change to subdir before executing command |
| 1449 | save_cwd = os.getcwd()
| 1450 | os.chdir(rootdir) |
| 1451 | if 'subdir' in urldata.parm: |
| 1452 | newdir = ("%s/%s" % (rootdir, urldata.parm.get('subdir'))) |
| 1453 | bb.utils.mkdirhier(newdir) |
| 1454 | os.chdir(newdir) |
| 1455 | |
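| | # Run the command through a shell, propagating the configured PATH so
| | # the unpack tools are resolved from the build environment.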
| 1456 | path = data.getVar('PATH', True) |
| 1457 | if path: |
| 1458 | cmd = "PATH=\"%s\" %s" % (path, cmd) |
| 1459 | bb.note("Unpacking %s to %s/" % (file, os.getcwd())) |
| 1460 | ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True) |
| 1461 | |
| 1462 | os.chdir(save_cwd) |
| 1463 | |
| 1464 | if ret != 0: |
| 1465 | raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url) |
| 1466 | |
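| | # For rpm/srpm URLs with an 'extract' parameter, recurse so that the
| | # extracted file is itself unpacked.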
| 1467 | if iterate is True: |
| 1468 | iterate_urldata = urldata |
| 1469 | iterate_urldata.localpath = "%s/%s" % (rootdir, iterate_file) |
| 1470 | self.unpack(urldata, rootdir, data) |
| 1471 | |
| 1472 | return |
| 1473 | |
| 1474 | def clean(self, urldata, d): |
| 1475 | """ |
| 1476 | Clean any existing full or partial download |
| 1477 | """ |
| 1478 | bb.utils.remove(urldata.localpath) |
| 1479 | |
| 1480 | def try_premirror(self, urldata, d): |
| 1481 | """ |
| 1482 | Should premirrors be used? |
| 1483 | """ |
| 1484 | return True |
| 1485 | |
| 1486 | def checkstatus(self, fetch, urldata, d): |
| 1487 | """ |
| 1488 | Check the status of a URL |
| 1489 | Assumes localpath was called first |
| 1490 | """ |
| 1491 | logger.info("URL %s could not be checked for status since no method exists.", urldata.url)
| 1492 | return True |
| 1493 | |
| 1494 | def latest_revision(self, ud, d, name): |
| 1495 | """ |
| 1496 | Look in the cache for the latest revision; if it is not present, ask the SCM.
| 1497 | """ |
| 1498 | if not hasattr(self, "_latest_revision"): |
| 1499 | raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url)
| 1500 | |
| 1501 | revs = bb.persist_data.persist('BB_URI_HEADREVS', d) |
| 1502 | key = self.generate_revision_key(ud, d, name) |
| 1503 | try: |
| 1504 | return revs[key] |
| 1505 | except KeyError: |
| 1506 | revs[key] = rev = self._latest_revision(ud, d, name) |
| 1507 | return rev |
| 1508 | |
| 1509 | def sortable_revision(self, ud, d, name): |
| 1510 | latest_rev = self._build_revision(ud, d, name) |
| 1511 | return True, str(latest_rev) |
| 1512 | |
| 1513 | def generate_revision_key(self, ud, d, name): |
| 1514 | key = self._revision_key(ud, d, name) |
| 1515 | return "%s-%s" % (key, d.getVar("PN", True) or "") |
| 1516 | |
| 1517 | class Fetch(object): |
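| | """
| | Front-end for the fetcher: builds a FetchData object for each URL
| | (defaulting to SRC_URI when no urls are passed) and drives download(),
| | checkstatus(), unpack() and clean() across them.
| | 
| | Typical use (an illustrative sketch only; 'd' is a populated datastore
| | and 'workdir' is a caller-chosen directory):
| | 
| | fetcher = Fetch([], d)
| | fetcher.download()
| | fetcher.unpack(workdir)
| | """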
| 1518 | def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None): |
| 1519 | if localonly and cache: |
| 1520 | raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at the same time")
| 1521 | |
| 1522 | if len(urls) == 0: |
| 1523 | urls = d.getVar("SRC_URI", True).split() |
| 1524 | self.urls = urls |
| 1525 | self.d = d |
| 1526 | self.ud = {} |
| 1527 | self.connection_cache = connection_cache |
| 1528 | |
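| | # Reuse any FetchData objects already cached for this recipe file;
| | # URLs missing from the cache are constructed below.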
| 1529 | fn = d.getVar('FILE', True) |
| 1530 | if cache and fn and fn in urldata_cache: |
| 1531 | self.ud = urldata_cache[fn] |
| 1532 | |
| 1533 | for url in urls: |
| 1534 | if url not in self.ud: |
| 1535 | try: |
| 1536 | self.ud[url] = FetchData(url, d, localonly) |
| 1537 | except NonLocalMethod: |
| 1538 | if localonly: |
| 1539 | self.ud[url] = None |
| 1540 | pass |
| 1541 | |
| 1542 | if fn and cache: |
| 1543 | urldata_cache[fn] = self.ud |
| 1544 | |
| 1545 | def localpath(self, url): |
| 1546 | if url not in self.urls: |
| 1547 | self.ud[url] = FetchData(url, self.d) |
| 1548 | |
| 1549 | self.ud[url].setup_localpath(self.d) |
| 1550 | return self.d.expand(self.ud[url].localpath) |
| 1551 | |
| 1552 | def localpaths(self): |
| 1553 | """ |
| 1554 | Return a list of the local filenames, assuming successful fetch |
| 1555 | """ |
| 1556 | local = [] |
| 1557 | |
| 1558 | for u in self.urls: |
| 1559 | ud = self.ud[u] |
| 1560 | ud.setup_localpath(self.d) |
| 1561 | local.append(ud.localpath) |
| 1562 | |
| 1563 | return local |
| 1564 | |
| 1565 | def download(self, urls=None): |
| 1566 | """ |
| 1567 | Fetch all urls |
| 1568 | """ |
| 1569 | if not urls: |
| 1570 | urls = self.urls |
| 1571 | |
| 1572 | network = self.d.getVar("BB_NO_NETWORK", True) |
| 1573 | premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY", True) == "1") |
| 1574 | |
| 1575 | for u in urls: |
| 1576 | ud = self.ud[u] |
| 1577 | ud.setup_localpath(self.d) |
| 1578 | m = ud.method |
| 1579 | localpath = "" |
| 1580 | |
| 1581 | lf = bb.utils.lockfile(ud.lockfile) |
| 1582 | |
| 1583 | try: |
| 1584 | self.d.setVar("BB_NO_NETWORK", network) |
| 1585 | |
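| | # Resolution order: reuse an existing verified download, then try
| | # PREMIRRORS, then the upstream URL, and finally MIRRORS if the
| | # upstream attempt fails.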
| 1586 | if verify_donestamp(ud, self.d) and not m.need_update(ud, self.d): |
| 1587 | localpath = ud.localpath |
| 1588 | elif m.try_premirror(ud, self.d): |
| 1589 | logger.debug(1, "Trying PREMIRRORS") |
| 1590 | mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True)) |
| 1591 | localpath = try_mirrors(self, self.d, ud, mirrors, False) |
| 1592 | |
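| | # With BB_FETCH_PREMIRRORONLY set, disable network access for the rest
| | # of this fetch so only the premirror result can be used.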
| 1593 | if premirroronly: |
| 1594 | self.d.setVar("BB_NO_NETWORK", "1") |
| 1595 | |
| 1596 | os.chdir(self.d.getVar("DL_DIR", True)) |
| 1597 | |
| 1598 | firsterr = None |
| 1599 | verified_stamp = verify_donestamp(ud, self.d) |
| 1600 | if not localpath and (not verified_stamp or m.need_update(ud, self.d)): |
| 1601 | try: |
| 1602 | if not trusted_network(self.d, ud.url): |
| 1603 | raise UntrustedUrl(ud.url) |
| 1604 | logger.debug(1, "Trying Upstream") |
| 1605 | m.download(ud, self.d) |
| 1606 | if hasattr(m, "build_mirror_data"): |
| 1607 | m.build_mirror_data(ud, self.d) |
| 1608 | localpath = ud.localpath |
| 1609 | # Verify checksums early so that, on a mismatch, the fetcher
| 1610 | # still has a chance to fetch from a mirror
| 1611 | update_stamp(ud, self.d) |
| 1612 | |
| 1613 | except bb.fetch2.NetworkAccess: |
| 1614 | raise |
| 1615 | |
| 1616 | except BBFetchException as e: |
| 1617 | if isinstance(e, ChecksumError): |
| 1618 | logger.warn("Checksum failure encountered with download of %s - will attempt other sources if available" % u) |
| 1619 | logger.debug(1, str(e)) |
| 1620 | rename_bad_checksum(ud, e.checksum) |
| 1621 | elif isinstance(e, NoChecksumError): |
| 1622 | raise |
| 1623 | else: |
| 1624 | logger.warn('Failed to fetch URL %s, attempting MIRRORS if available' % u) |
| 1625 | logger.debug(1, str(e)) |
| 1626 | firsterr = e |
| 1627 | # Remove any incomplete fetch |
| 1628 | if not verified_stamp: |
| 1629 | m.clean(ud, self.d) |
| 1630 | logger.debug(1, "Trying MIRRORS") |
| 1631 | mirrors = mirror_from_string(self.d.getVar('MIRRORS', True)) |
| 1632 | localpath = try_mirrors(self, self.d, ud, mirrors) |
| 1633 | |
| 1634 | if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1): |
| 1635 | if firsterr: |
| 1636 | logger.error(str(firsterr)) |
| 1637 | raise FetchError("Unable to fetch URL from any source.", u) |
| 1638 | |
| 1639 | update_stamp(ud, self.d) |
| 1640 | |
| 1641 | except BBFetchException as e: |
| 1642 | if isinstance(e, ChecksumError): |
| 1643 | logger.error("Checksum failure fetching %s" % u) |
| 1644 | raise |
| 1645 | |
| 1646 | finally: |
| 1647 | bb.utils.unlockfile(lf) |
| 1648 | |
| 1649 | def checkstatus(self, urls=None): |
| 1650 | """ |
| 1651 | Check that all urls exist upstream
| 1652 | """ |
| 1653 | |
| 1654 | if not urls: |
| 1655 | urls = self.urls |
| 1656 | |
| 1657 | for u in urls: |
| 1658 | ud = self.ud[u] |
| 1659 | ud.setup_localpath(self.d) |
| 1660 | m = ud.method |
| 1661 | logger.debug(1, "Testing URL %s", u) |
| 1662 | # First try checking uri, u, from PREMIRRORS |
| 1663 | mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True)) |
| 1664 | ret = try_mirrors(self, self.d, ud, mirrors, True) |
| 1665 | if not ret: |
| 1666 | # Next try checking from the original uri, u |
| 1667 | try: |
| 1668 | ret = m.checkstatus(self, ud, self.d) |
| 1669 | except: |
| 1670 | # Finally, try checking uri, u, from MIRRORS |
| 1671 | mirrors = mirror_from_string(self.d.getVar('MIRRORS', True)) |
| 1672 | ret = try_mirrors(self, self.d, ud, mirrors, True) |
| 1673 | |
| 1674 | if not ret: |
| 1675 | raise FetchError("URL %s doesn't work" % u, u) |
| 1676 | |
| 1677 | def unpack(self, root, urls=None): |
| 1678 | """ |
| 1679 | Unpack all urls into the given root directory
| 1680 | """ |
| 1681 | |
| 1682 | if not urls: |
| 1683 | urls = self.urls |
| 1684 | |
| 1685 | for u in urls: |
| 1686 | ud = self.ud[u] |
| 1687 | ud.setup_localpath(self.d) |
| 1688 | |
| 1689 | if self.d.expand(ud.localpath) is None:
| 1690 | continue |
| 1691 | |
| 1692 | if ud.lockfile: |
| 1693 | lf = bb.utils.lockfile(ud.lockfile) |
| 1694 | |
| 1695 | ud.method.unpack(ud, root, self.d) |
| 1696 | |
| 1697 | if ud.lockfile: |
| 1698 | bb.utils.unlockfile(lf) |
| 1699 | |
| 1700 | def clean(self, urls=None): |
| 1701 | """ |
| 1702 | Clean files that the fetcher gets or places |
| 1703 | """ |
| 1704 | |
| 1705 | if not urls: |
| 1706 | urls = self.urls |
| 1707 | |
| 1708 | for url in urls: |
| 1709 | if url not in self.ud: |
| 1710 | self.ud[url] = FetchData(url, self.d)
| 1711 | ud = self.ud[url] |
| 1712 | ud.setup_localpath(self.d) |
| 1713 | |
| 1714 | if not ud.localfile and ud.localpath is None: |
| 1715 | continue |
| 1716 | |
| 1717 | if ud.lockfile: |
| 1718 | lf = bb.utils.lockfile(ud.lockfile) |
| 1719 | |
| 1720 | ud.method.clean(ud, self.d) |
| 1721 | if ud.donestamp: |
| 1722 | bb.utils.remove(ud.donestamp) |
| 1723 | |
| 1724 | if ud.lockfile: |
| 1725 | bb.utils.unlockfile(lf) |
| 1726 | |
| 1727 | class FetchConnectionCache(object): |
| 1728 | """ |
| 1729 | A container for open socket connections, keyed by host and port.
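| | 
| | An instance can be passed to Fetch via its connection_cache parameter
| | so fetchers can reuse open connections across downloads.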
| 1730 | """ |
| 1731 | def __init__(self): |
| 1732 | self.cache = {} |
| 1733 | |
| 1734 | def get_connection_name(self, host, port): |
| 1735 | return host + ':' + str(port) |
| 1736 | |
| 1737 | def add_connection(self, host, port, connection): |
| 1738 | cn = self.get_connection_name(host, port) |
| 1739 | |
| 1740 | if cn not in self.cache: |
| 1741 | self.cache[cn] = connection |
| 1742 | |
| 1743 | def get_connection(self, host, port): |
| 1744 | connection = None |
| 1745 | |
| 1746 | cn = self.get_connection_name(host, port) |
| 1747 | if cn in self.cache: |
| 1748 | connection = self.cache[cn] |
| 1749 | |
| 1750 | return connection |
| 1751 | |
| 1752 | def remove_connection(self, host, port): |
| 1753 | cn = self.get_connection_name(host, port) |
| 1754 | if cn in self.cache: |
| 1755 | self.cache[cn].close() |
| 1756 | del self.cache[cn] |
| 1757 | |
| 1758 | def close_connections(self): |
| 1759 | for cn in self.cache.keys(): |
| 1760 | self.cache[cn].close() |
| 1761 | del self.cache[cn] |
| 1762 | |
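| | # Import the concrete fetcher implementations and register an instance
| | # of each with the global 'methods' list, which the rest of the module
| | # consults to find a handler for a given URL.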
| 1763 | from . import cvs |
| 1764 | from . import git |
| 1765 | from . import gitsm |
| 1766 | from . import gitannex |
| 1767 | from . import local |
| 1768 | from . import svn |
| 1769 | from . import wget |
| 1770 | from . import ssh |
| 1771 | from . import sftp |
| 1772 | from . import perforce |
| 1773 | from . import bzr |
| 1774 | from . import hg |
| 1775 | from . import osc |
| 1776 | from . import repo |
| 1777 | from . import clearcase |
| 1778 | |
| 1779 | methods.append(local.Local()) |
| 1780 | methods.append(wget.Wget()) |
| 1781 | methods.append(svn.Svn()) |
| 1782 | methods.append(git.Git()) |
| 1783 | methods.append(gitsm.GitSM()) |
| 1784 | methods.append(gitannex.GitANNEX()) |
| 1785 | methods.append(cvs.Cvs()) |
| 1786 | methods.append(ssh.SSH()) |
| 1787 | methods.append(sftp.SFTP()) |
| 1788 | methods.append(perforce.Perforce()) |
| 1789 | methods.append(bzr.Bzr()) |
| 1790 | methods.append(hg.Hg()) |
| 1791 | methods.append(osc.Osc()) |
| 1792 | methods.append(repo.Repo()) |
| 1793 | methods.append(clearcase.ClearCase()) |