# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' implementations

Classes for obtaining upstream sources for the
BitBake build tools.
"""

# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2012 Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
import os, re
import signal
import logging
import urllib.request, urllib.parse, urllib.error
if 'git' not in urllib.parse.uses_netloc:
    urllib.parse.uses_netloc.append('git')
import operator
import collections
import subprocess
import pickle
import bb.persist_data, bb.utils
import bb.checksum
from bb import data
import bb.process

__version__ = "2"
_checksum_cache = bb.checksum.FileChecksumCache()

logger = logging.getLogger("BitBake.Fetcher")

class BBFetchException(Exception):
    """Class all fetch exceptions inherit from"""
    def __init__(self, message):
        self.msg = message
        Exception.__init__(self, message)

    def __str__(self):
        return self.msg

class UntrustedUrl(BBFetchException):
    """Exception raised when encountering a host not listed in BB_ALLOWED_NETWORKS"""
    def __init__(self, url, message=''):
        if message:
            msg = message
        else:
            msg = "The URL: '%s' is not trusted and cannot be used" % url
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (url,)

class MalformedUrl(BBFetchException):
    """Exception raised when encountering an invalid url"""
    def __init__(self, url, message=''):
        if message:
            msg = message
        else:
            msg = "The URL: '%s' is invalid and cannot be interpreted" % url
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (url,)

class FetchError(BBFetchException):
    """General fetcher exception when something happens incorrectly"""
    def __init__(self, message, url = None):
        if url:
            msg = "Fetcher failure for URL: '%s'. %s" % (url, message)
        else:
            msg = "Fetcher failure: %s" % message
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (message, url)

class ChecksumError(FetchError):
    """Exception when mismatched checksum encountered"""
    def __init__(self, message, url = None, checksum = None):
        self.checksum = checksum
        FetchError.__init__(self, message, url)

class NoChecksumError(FetchError):
    """Exception when no checksum is specified, but BB_STRICT_CHECKSUM is set"""

class UnpackError(BBFetchException):
    """General fetcher exception when something happens incorrectly when unpacking"""
    def __init__(self, message, url):
        msg = "Unpack failure for URL: '%s'. %s" % (url, message)
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (message, url)

class NoMethodError(BBFetchException):
    """Exception raised when there is no method to obtain a supplied url or set of urls"""
    def __init__(self, url):
        msg = "Could not find a fetcher which supports the URL: '%s'" % url
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (url,)

class MissingParameterError(BBFetchException):
    """Exception raised when a fetch method is missing a critical parameter in the url"""
    def __init__(self, missing, url):
        msg = "URL: '%s' is missing the required parameter '%s'" % (url, missing)
        self.url = url
        self.missing = missing
        BBFetchException.__init__(self, msg)
        self.args = (missing, url)

class ParameterError(BBFetchException):
    """Exception raised when a url cannot be processed due to invalid parameters."""
    def __init__(self, message, url):
        msg = "URL: '%s' has invalid parameters. %s" % (url, message)
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (message, url)

131
132class NetworkAccess(BBFetchException):
133 """Exception raised when network access is disabled but it is required."""
134 def __init__(self, url, cmd):
135 msg = "Network access disabled through BB_NO_NETWORK (or set indirectly due to use of BB_FETCH_PREMIRRORONLY) but access requested with command %s (for url %s)" % (cmd, url)
136 self.url = url
137 self.cmd = cmd
138 BBFetchException.__init__(self, msg)
139 self.args = (url, cmd)
140
141class NonLocalMethod(Exception):
142 def __init__(self):
143 Exception.__init__(self)
144
145
class URI(object):
    """
    A class representing a generic URI, with methods for
    accessing the URI components, and stringifies to the
    URI.

    It is constructed by calling it with a URI, or setting
    the attributes manually:

     uri = URI("http://example.com/")

     uri = URI()
     uri.scheme = 'http'
     uri.hostname = 'example.com'
     uri.path = '/'

    It has the following attributes:

      * scheme (read/write)
      * userinfo (authentication information) (read/write)
        * username (read/write)
        * password (read/write)

        Note, password is deprecated as of RFC 3986.

      * hostname (read/write)
      * port (read/write)
      * hostport (read only)
        "hostname:port", if both are set, otherwise just "hostname"
      * path (read/write)
      * path_quoted (read/write)
        A URI quoted version of path
      * params (dict) (read/write)
      * query (dict) (read/write)
      * relative (bool) (read only)
        True if this is a "relative URI", (e.g. file:foo.diff)

    It stringifies to the URI itself.

    Some notes about relative URIs: while it's specified that
    a URI beginning with <scheme>:// should either be directly
    followed by a hostname or a /, the old URI handling of the
    fetch2 library did not conform to this. Therefore, this URI
    class has some kludges to make sure that URIs are parsed in
    a way conforming to bitbake's current usage. This URI class
    supports the following:

     file:relative/path.diff (IETF compliant)
     git:relative/path.git (IETF compliant)
     git:///absolute/path.git (IETF compliant)
     file:///absolute/path.diff (IETF compliant)

     file://relative/path.diff (not IETF compliant)

    But it does not support the following:

     file://hostname/absolute/path.diff (would be IETF compliant)

    Note that the last case only applies to a list of
    "whitelisted" schemes (currently only file://), that requires
    its URIs to not have a network location.
    """

    _relative_schemes = ['file', 'git']
    _netloc_forbidden = ['file']

    def __init__(self, uri=None):
        self.scheme = ''
        self.userinfo = ''
        self.hostname = ''
        self.port = None
        self._path = ''
        self.params = {}
        self.query = {}
        self.relative = False

        if not uri:
            return

        # We hijack the URL parameters, since the way bitbake uses
        # them is not quite RFC compliant.
        uri, param_str = (uri.split(";", 1) + [None])[:2]

        urlp = urllib.parse.urlparse(uri)
        self.scheme = urlp.scheme

        reparse = 0

        # Coerce urlparse to make the URI scheme use a netloc
        if self.scheme not in urllib.parse.uses_netloc:
            urllib.parse.uses_netloc.append(self.scheme)
            reparse = 1

        # Make urlparse happy(/ier) by converting local resources
        # to RFC compliant URL format. E.g.:
        #   file://foo.diff -> file:foo.diff
        if urlp.scheme in self._netloc_forbidden:
            uri = re.sub(r"(?<=:)//(?!/)", "", uri, 1)
            reparse = 1

        if reparse:
            urlp = urllib.parse.urlparse(uri)

        # Identify if the URI is relative or not
        if urlp.scheme in self._relative_schemes and \
           re.compile(r"^\w+:(?!//)").match(uri):
            self.relative = True

        if not self.relative:
            self.hostname = urlp.hostname or ''
            self.port = urlp.port

            self.userinfo += urlp.username or ''

            if urlp.password:
                self.userinfo += ':%s' % urlp.password

        self.path = urllib.parse.unquote(urlp.path)

        if param_str:
            self.params = self._param_str_split(param_str, ";")
        if urlp.query:
            self.query = self._param_str_split(urlp.query, "&")

    def __str__(self):
        userinfo = self.userinfo
        if userinfo:
            userinfo += '@'

        return "%s:%s%s%s%s%s%s" % (
            self.scheme,
            '' if self.relative else '//',
            userinfo,
            self.hostport,
            self.path_quoted,
            self._query_str(),
            self._param_str())

    def _param_str(self):
        return (
            ''.join([';', self._param_str_join(self.params, ";")])
            if self.params else '')

    def _query_str(self):
        return (
            ''.join(['?', self._param_str_join(self.query, "&")])
            if self.query else '')

    def _param_str_split(self, string, elmdelim, kvdelim="="):
        ret = collections.OrderedDict()
        for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim)]:
            ret[k] = v
        return ret

    def _param_str_join(self, dict_, elmdelim, kvdelim="="):
        return elmdelim.join([kvdelim.join([k, v]) for k, v in dict_.items()])

    @property
    def hostport(self):
        if not self.port:
            return self.hostname
        return "%s:%d" % (self.hostname, self.port)

    @property
    def path_quoted(self):
        return urllib.parse.quote(self.path)

    @path_quoted.setter
    def path_quoted(self, path):
        self.path = urllib.parse.unquote(path)

    @property
    def path(self):
        return self._path

    @path.setter
    def path(self, path):
        self._path = path

        if not path or re.compile("^/").match(path):
            self.relative = False
        else:
            self.relative = True

    @property
    def username(self):
        if self.userinfo:
            return (self.userinfo.split(":", 1))[0]
        return ''

    @username.setter
    def username(self, username):
        password = self.password
        self.userinfo = username
        if password:
            self.userinfo += ":%s" % password

    @property
    def password(self):
        if self.userinfo and ":" in self.userinfo:
            return (self.userinfo.split(":", 1))[1]
        return ''

    @password.setter
    def password(self, password):
        self.userinfo = "%s:%s" % (self.username, password)

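# Illustrative URI usage (not part of the upstream module; the URL below is
# hypothetical). str() reassembles the URI, and query/params edits are
# reflected in the stringified form:
#
#   u = URI("http://example.com/downloads/pkg-1.0.tar.gz")
#   u.query["token"] = "abc"
#   str(u)  # -> 'http://example.com/downloads/pkg-1.0.tar.gz?token=abc'
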
def decodeurl(url):
    """Decodes a URL into the tokens (scheme, network location, path,
    user, password, parameters).
    """

    m = re.compile('(?P<type>[^:]*)://((?P<user>[^/]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
    if not m:
        raise MalformedUrl(url)

    type = m.group('type')
    location = m.group('location')
    if not location:
        raise MalformedUrl(url)
    user = m.group('user')
    parm = m.group('parm')

    locidx = location.find('/')
    if locidx != -1 and type.lower() != 'file':
        host = location[:locidx]
        path = location[locidx:]
    elif type.lower() == 'file':
        host = ""
        path = location
    else:
        host = location
        path = ""
    if user:
        m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
        if m:
            user = m.group('user')
            pswd = m.group('pswd')
    else:
        user = ''
        pswd = ''

    p = collections.OrderedDict()
    if parm:
        for s in parm.split(';'):
            if s:
                if '=' not in s:
                    raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s))
                s1, s2 = s.split('=')
                p[s1] = s2

    return type, host, urllib.parse.unquote(path), user, pswd, p

def encodeurl(decoded):
    """Encodes a URL from tokens (scheme, network location, path,
    user, password, parameters).
    """

    type, host, path, user, pswd, p = decoded

    if not path:
        raise MissingParameterError('path', "encoded from the data %s" % str(decoded))
    if not type:
        raise MissingParameterError('type', "encoded from the data %s" % str(decoded))
    url = '%s://' % type
    if user and type != "file":
        url += "%s" % user
        if pswd:
            url += ":%s" % pswd
        url += "@"
    if host and type != "file":
        url += "%s" % host
    # Standardise path to ensure comparisons work
    while '//' in path:
        path = path.replace("//", "/")
    url += "%s" % urllib.parse.quote(path)
    if p:
        for parm in p:
            url += ";%s=%s" % (parm, p[parm])

    return url

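# encodeurl() is the inverse of decodeurl(), so a decode/encode round trip
# yields a normalised URL. A sketch with hypothetical values:
#
#   encodeurl(('https', 'example.com', '/downloads/pkg-1.0.tar.gz', '', '',
#              {'name': 'pkg'}))
#   # -> 'https://example.com/downloads/pkg-1.0.tar.gz;name=pkg'
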
def uri_replace(ud, uri_find, uri_replace, replacements, d):
    if not ud.url or not uri_find or not uri_replace:
        logger.error("uri_replace: passed an undefined value, not replacing")
        return None
    uri_decoded = list(decodeurl(ud.url))
    uri_find_decoded = list(decodeurl(uri_find))
    uri_replace_decoded = list(decodeurl(uri_replace))
    logger.debug(2, "For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
    result_decoded = ['', '', '', '', '', {}]
    for loc, i in enumerate(uri_find_decoded):
        result_decoded[loc] = uri_decoded[loc]
        regexp = i
        if loc == 0 and regexp and not regexp.endswith("$"):
            # Anchor the type, otherwise e.g. a find expression of "file"
            # would also match "files", which is clearly undesirable.
            regexp += "$"
        if loc == 5:
            # Handle URL parameters
            if i:
                # Any specified URL parameters must match
                for k in uri_replace_decoded[loc]:
                    if uri_decoded[loc][k] != uri_replace_decoded[loc][k]:
                        return None
            # Overwrite any specified replacement parameters
            for k in uri_replace_decoded[loc]:
                for l in replacements:
                    uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l])
                result_decoded[loc][k] = uri_replace_decoded[loc][k]
        elif (re.match(regexp, uri_decoded[loc])):
            if not uri_replace_decoded[loc]:
                result_decoded[loc] = ""
            else:
                for k in replacements:
                    uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k])
                #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc]))
                result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc], 1)
            if loc == 2:
                # Handle path manipulations
                basename = None
                if uri_decoded[0] != uri_replace_decoded[0] and ud.mirrortarball:
                    # If the source and destination url types differ, must be a mirrortarball mapping
                    basename = os.path.basename(ud.mirrortarball)
                    # Kill parameters, they make no sense for mirror tarballs
                    uri_decoded[5] = {}
                elif ud.localpath and ud.method.supports_checksum(ud):
                    basename = os.path.basename(ud.localpath)
                if basename and not result_decoded[loc].endswith(basename):
                    result_decoded[loc] = os.path.join(result_decoded[loc], basename)
        else:
            return None
    result = encodeurl(result_decoded)
    if result == ud.url:
        return None
    logger.debug(2, "For url %s returning %s" % (ud.url, result))
    return result

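# A hedged sketch of what uri_replace() does for mirror handling: given a
# PREMIRRORS/MIRRORS pair such as (hypothetical host and path)
#
#   uri_find    = "git://.*/.*"
#   uri_replace = "http://downloads.example.com/mirror/"
#
# a FetchData for "git://git.example.com/repo.git" is rewritten onto the
# mirror URL, with the tarball or file basename appended when applicable.
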
methods = []
urldata_cache = {}
saved_headrevs = {}

def fetcher_init(d):
    """
    Called to initialize the fetchers once the configuration data is known.
    Calls before this must not hit the cache.
    """
    # When to drop SCM head revisions controlled by user policy
    srcrev_policy = d.getVar('BB_SRCREV_POLICY', True) or "clear"
    if srcrev_policy == "cache":
        logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
    elif srcrev_policy == "clear":
        logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
        try:
            bb.fetch2.saved_headrevs = revs.items()
        except:
            pass
        revs.clear()
    else:
        raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)

    _checksum_cache.init_cache(d)

    for m in methods:
        if hasattr(m, "init"):
            m.init(d)

def fetcher_parse_save():
    _checksum_cache.save_extras()

def fetcher_parse_done():
    _checksum_cache.save_merge()

def fetcher_compare_revisions(d):
    """
    Compare the revisions in the persistent cache with current values and
    return true/false on whether they've changed.
    """

    data = bb.persist_data.persist('BB_URI_HEADREVS', d).items()
    data2 = bb.fetch2.saved_headrevs

    for key in data:
        if key not in data2 or data2[key] != data[key]:
            logger.debug(1, "%s changed", key)
            return True
        else:
            logger.debug(2, "%s did not change", key)
    return False

def mirror_from_string(data):
    return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ]

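# Illustrative mirror_from_string() input/output (hypothetical hosts). The
# variable value uses literal '\n' separators between entries:
#
#   mirror_from_string("git://.*/.* http://downloads.example.com/mirror/ \\n "
#                      "https://.*/.* file:///srv/mirror/")
#   # -> [['git://.*/.*', 'http://downloads.example.com/mirror/'],
#   #     ['https://.*/.*', 'file:///srv/mirror/']]
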
def verify_checksum(ud, d, precomputed={}):
    """
    verify the MD5 and SHA256 checksum for downloaded src

    Raises a FetchError if one or both of the SRC_URI checksums do not match
    the downloaded file, or if BB_STRICT_CHECKSUM is set and there are no
    checksums specified.

    Returns a dict of checksums that can be stored in a done stamp file and
    passed in as precomputed parameter in a later call to avoid re-computing
    the checksums from the file. This allows verifying the checksums of the
    file against those in the recipe each time, rather than only after
    downloading. See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571.
    """

    _MD5_KEY = "md5"
    _SHA256_KEY = "sha256"

    if ud.ignore_checksums or not ud.method.supports_checksum(ud):
        return {}

    if _MD5_KEY in precomputed:
        md5data = precomputed[_MD5_KEY]
    else:
        md5data = bb.utils.md5_file(ud.localpath)

    if _SHA256_KEY in precomputed:
        sha256data = precomputed[_SHA256_KEY]
    else:
        sha256data = bb.utils.sha256_file(ud.localpath)

    if ud.method.recommends_checksum(ud) and not ud.md5_expected and not ud.sha256_expected:
        # If strict checking enabled and neither sum defined, raise error
        strict = d.getVar("BB_STRICT_CHECKSUM", True) or "0"
        if strict == "1":
            logger.error('No checksum specified for %s, please add at least one to the recipe:\n'
                         'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
                         (ud.localpath, ud.md5_name, md5data,
                          ud.sha256_name, sha256data))
            raise NoChecksumError('Missing SRC_URI checksum', ud.url)

        # Log missing sums so user can more easily add them
        logger.warning('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n'
                       'SRC_URI[%s] = "%s"',
                       ud.localpath, ud.md5_name, md5data)
        logger.warning('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n'
                       'SRC_URI[%s] = "%s"',
                       ud.localpath, ud.sha256_name, sha256data)

    # We want to alert the user if a checksum is defined in the recipe but
    # it does not match.
    msg = ""
    mismatch = False
    if ud.md5_expected and ud.md5_expected != md5data:
        msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'md5', md5data, ud.md5_expected)
        mismatch = True

    if ud.sha256_expected and ud.sha256_expected != sha256data:
        msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'sha256', sha256data, ud.sha256_expected)
        mismatch = True

    if mismatch:
        msg = msg + '\nIf this change is expected (e.g. you have upgraded to a new version without updating the checksums) then you can use these lines within the recipe:\nSRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"\nOtherwise you should retry the download and/or check with upstream to determine if the file has become corrupted or otherwise unexpectedly modified.\n' % (ud.md5_name, md5data, ud.sha256_name, sha256data)

    if len(msg):
        raise ChecksumError('Checksum mismatch!%s' % msg, ud.url, md5data)

    return {
        _MD5_KEY: md5data,
        _SHA256_KEY: sha256data
    }

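# Illustrative flow (with a hypothetical FetchData 'ud'): the returned dict
# can be stored in the done stamp and passed back later so the file does not
# have to be re-hashed:
#
#   sums = verify_checksum(ud, d)              # {'md5': ..., 'sha256': ...}
#   verify_checksum(ud, d, precomputed=sums)   # re-verify against the recipe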

def verify_donestamp(ud, d, origud=None):
    """
    Check whether the done stamp file has the right checksums (if the fetch
    method supports them). If it doesn't, delete the done stamp and force
    a re-download.

    Returns True, if the donestamp exists and is valid, False otherwise. When
    returning False, any existing done stamps are removed.
    """
    if not ud.needdonestamp:
        return True

    if not os.path.exists(ud.donestamp):
        return False

    if (not ud.method.supports_checksum(ud) or
        (origud and not origud.method.supports_checksum(origud))):
        # done stamp exists, checksums not supported; assume the local file is
        # current
        return True

    if not os.path.exists(ud.localpath):
        # done stamp exists, but the downloaded file does not; the done stamp
        # must be incorrect, re-trigger the download
        bb.utils.remove(ud.donestamp)
        return False

    precomputed_checksums = {}
    # Only re-use the precomputed checksums if the donestamp is newer than the
    # file. Do not rely on the mtime of directories, though. If ud.localpath is
    # a directory, there will probably not be any checksums anyway.
    if (os.path.isdir(ud.localpath) or
        os.path.getmtime(ud.localpath) < os.path.getmtime(ud.donestamp)):
        try:
            with open(ud.donestamp, "rb") as cachefile:
                pickled = pickle.Unpickler(cachefile)
                precomputed_checksums.update(pickled.load())
        except Exception as e:
            # Avoid the warnings on the upgrade path from empty done stamp
            # files to those containing the checksums.
            if not isinstance(e, EOFError):
                # Ignore errors, they aren't fatal
                logger.warning("Couldn't load checksums from donestamp %s: %s "
                               "(msg: %s)" % (ud.donestamp, type(e).__name__,
                                              str(e)))

    try:
        checksums = verify_checksum(ud, d, precomputed_checksums)
        # If the cache file did not have the checksums, compute and store them
        # as an upgrade path from the previous done stamp file format.
        if checksums != precomputed_checksums:
            with open(ud.donestamp, "wb") as cachefile:
                p = pickle.Pickler(cachefile, 2)
                p.dump(checksums)
        return True
    except ChecksumError as e:
        # Checksums failed to verify, trigger re-download and remove the
        # incorrect stamp file.
        logger.warning("Checksum mismatch for local file %s\n"
                       "Cleaning and trying again." % ud.localpath)
        if os.path.exists(ud.localpath):
            rename_bad_checksum(ud, e.checksum)
        bb.utils.remove(ud.donestamp)
        return False


def update_stamp(ud, d):
    """
    The done stamp file indicates that the whole fetch is done.
    This function updates the stamp after verifying the checksum.
    """
    if not ud.needdonestamp:
        return

    if os.path.exists(ud.donestamp):
        # Touch the done stamp file to show active use of the download
        try:
            os.utime(ud.donestamp, None)
        except:
            # Errors aren't fatal here
            pass
    else:
        try:
            checksums = verify_checksum(ud, d)
            # Store the checksums for later re-verification against the recipe
            with open(ud.donestamp, "wb") as cachefile:
                p = pickle.Pickler(cachefile, 2)
                p.dump(checksums)
        except ChecksumError as e:
            # Checksums failed to verify, trigger re-download and remove the
            # incorrect stamp file.
            logger.warning("Checksum mismatch for local file %s\n"
                           "Cleaning and trying again." % ud.localpath)
            if os.path.exists(ud.localpath):
                rename_bad_checksum(ud, e.checksum)
            bb.utils.remove(ud.donestamp)
            raise

def subprocess_setup():
    # Python installs a SIGPIPE handler by default. This is usually not what
    # non-Python subprocesses expect.
    # SIGPIPE errors are known issues with gzip/bash
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)

def get_autorev(d):
    # Do not cache the source revision in the autorev case, unless the
    # SRCREV cache policy is explicitly set to "cache"
    if d.getVar('BB_SRCREV_POLICY', True) != "cache":
        d.setVar('BB_DONT_CACHE', '1')
    return "AUTOINC"

def get_srcrev(d, method_name='sortable_revision'):
    """
    Return the revision string, usually for use in the version string (PV) of the current package
    Most packages usually only have one SCM so we just pass on the call.
    In the multi SCM case, we build a value based on SRCREV_FORMAT which must
    have been set.

    The idea here is that we put the string "AUTOINC+" into return value if the revisions are not
    incremental, other code is then responsible for turning that into an increasing value (if needed)

    A method_name can be supplied to retrieve an alternatively formatted revision from a fetcher, if
    that fetcher provides a method with the given name and the same signature as sortable_revision.
    """

    scms = []
    fetcher = Fetch(d.getVar('SRC_URI', True).split(), d)
    urldata = fetcher.ud
    for u in urldata:
        if urldata[u].method.supports_srcrev():
            scms.append(u)

    if len(scms) == 0:
        raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")

    if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
        autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].names[0])
        if len(rev) > 10:
            rev = rev[:10]
        if autoinc:
            return "AUTOINC+" + rev
        return rev

    #
    # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
    #
    format = d.getVar('SRCREV_FORMAT', True)
    if not format:
        raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")

    name_to_rev = {}
    seenautoinc = False
    for scm in scms:
        ud = urldata[scm]
        for name in ud.names:
            autoinc, rev = getattr(ud.method, method_name)(ud, d, name)
            seenautoinc = seenautoinc or autoinc
            if len(rev) > 10:
                rev = rev[:10]
            name_to_rev[name] = rev
    # Replace names by revisions in the SRCREV_FORMAT string. The approach used
    # here can handle names being prefixes of other names and names appearing
    # as substrings in revisions (in which case the name should not be
    # expanded). The '|' regular expression operator tries matches from left to
    # right, so we need to sort the names with the longest ones first.
    names_descending_len = sorted(name_to_rev, key=len, reverse=True)
    name_to_rev_re = "|".join(re.escape(name) for name in names_descending_len)
    format = re.sub(name_to_rev_re, lambda match: name_to_rev[match.group(0)], format)

    if seenautoinc:
        format = "AUTOINC+" + format

    return format

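# A hedged SRCREV_FORMAT sketch (hypothetical recipe values): with two named
# SCMs in SRC_URI,
#
#   SRC_URI = "git://git.example.com/machine;name=machine \
#              git://git.example.com/meta;name=meta"
#   SRCREV_FORMAT = "machine_meta"
#
# each name in the format string is replaced by its (truncated) revision,
# e.g. "AUTOINC+f2a0b1c3d4_9e8d7c6b5a" when both SCMs use AUTOREV.
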
def localpath(url, d):
    fetcher = bb.fetch2.Fetch([url], d)
    return fetcher.localpath(url)

def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
    """
    Run cmd returning the command output
    Raise an error if interrupted or cmd fails
    Optionally echo command output to stdout
    Optionally remove the files/directories listed in cleanup upon failure
    """

    # Need to export PATH as binary could be in metadata paths
    # rather than host provided
    # Also include some other variables.
    # FIXME: Should really include all export variables?
    exportvars = ['HOME', 'PATH',
                  'HTTP_PROXY', 'http_proxy',
                  'HTTPS_PROXY', 'https_proxy',
                  'FTP_PROXY', 'ftp_proxy',
                  'FTPS_PROXY', 'ftps_proxy',
                  'NO_PROXY', 'no_proxy',
                  'ALL_PROXY', 'all_proxy',
                  'GIT_PROXY_COMMAND',
                  'GIT_SSL_CAINFO',
                  'GIT_SMART_HTTP',
                  'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
                  'SOCKS5_USER', 'SOCKS5_PASSWD',
                  'DBUS_SESSION_BUS_ADDRESS',
                  'P4CONFIG']

    if not cleanup:
        cleanup = []

    origenv = d.getVar("BB_ORIGENV", False)
    for var in exportvars:
        val = d.getVar(var, True) or (origenv and origenv.getVar(var, True))
        if val:
            cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)

    logger.debug(1, "Running %s", cmd)

    success = False
    error_message = ""

    try:
        (output, errors) = bb.process.run(cmd, log=log, shell=True, stderr=subprocess.PIPE, cwd=workdir)
        success = True
    except bb.process.NotFoundError as e:
        error_message = "Fetch command %s" % (e.command)
    except bb.process.ExecutionError as e:
        if e.stdout:
            output = "output:\n%s\n%s" % (e.stdout, e.stderr)
        elif e.stderr:
            output = "output:\n%s" % e.stderr
        else:
            output = "no output"
        error_message = "Fetch command %s failed with exit code %s, %s" % (e.command, e.exitcode, output)
    except bb.process.CmdError as e:
        error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg)
    if not success:
        for f in cleanup:
            try:
                bb.utils.remove(f, True)
            except OSError:
                pass

        raise FetchError(error_message)

    return output

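# Typical fetcher-internal usage (illustrative; the command and 'repourl'
# variable are hypothetical). The proxy/ssh environment above is exported
# automatically, and failures surface as FetchError:
#
#   output = runfetchcmd("git ls-remote %s" % repourl, d, quiet=True)
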
def check_network_access(d, info = "", url = None):
    """
    log remote network access, and error if BB_NO_NETWORK is set
    """
    if d.getVar("BB_NO_NETWORK", True) == "1":
        raise NetworkAccess(url, info)
    else:
        logger.debug(1, "Fetcher accessed the network with the command %s" % info)

def build_mirroruris(origud, mirrors, ld):
    uris = []
    uds = []

    replacements = {}
    replacements["TYPE"] = origud.type
    replacements["HOST"] = origud.host
    replacements["PATH"] = origud.path
    replacements["BASENAME"] = origud.path.split("/")[-1]
    replacements["MIRRORNAME"] = origud.host.replace(':','.') + origud.path.replace('/', '.').replace('*', '.')

    def adduri(ud, uris, uds, mirrors):
        for line in mirrors:
            try:
                (find, replace) = line
            except ValueError:
                continue
            newuri = uri_replace(ud, find, replace, replacements, ld)
            if not newuri or newuri in uris or newuri == origud.url:
                continue

            if not trusted_network(ld, newuri):
                logger.debug(1, "Mirror %s not in the list of trusted networks, skipping" % (newuri))
                continue

            # Create a local copy of the mirrors minus the current line
            # this will prevent us from recursively processing the same line
            # as well as indirect recursion A -> B -> C -> A
            localmirrors = list(mirrors)
            localmirrors.remove(line)

            try:
                newud = FetchData(newuri, ld)
                newud.setup_localpath(ld)
            except bb.fetch2.BBFetchException as e:
                logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
                logger.debug(1, str(e))
                try:
                    # setup_localpath of file:// urls may fail, we should still see
                    # if mirrors of the url exist
                    adduri(newud, uris, uds, localmirrors)
                except UnboundLocalError:
                    pass
                continue
            uris.append(newuri)
            uds.append(newud)

            adduri(newud, uris, uds, localmirrors)

    adduri(origud, uris, uds, mirrors)

    return uris, uds

def rename_bad_checksum(ud, suffix):
    """
    Rename the local file to add the given suffix, setting aside a download
    whose checksum did not match.
    """

    if ud.localpath is None:
        return

    new_localpath = "%s_bad-checksum_%s" % (ud.localpath, suffix)
    bb.warn("Renaming %s to %s" % (ud.localpath, new_localpath))
    bb.utils.movefile(ud.localpath, new_localpath)


def try_mirror_url(fetch, origud, ud, ld, check = False):
    # Return of None or a value means we're finished
    # False means try another url

    if ud.lockfile and ud.lockfile != origud.lockfile:
        lf = bb.utils.lockfile(ud.lockfile)

    try:
        if check:
            found = ud.method.checkstatus(fetch, ud, ld)
            if found:
                return found
            return False

        if not verify_donestamp(ud, ld, origud) or ud.method.need_update(ud, ld):
            ud.method.download(ud, ld)
            if hasattr(ud.method,"build_mirror_data"):
                ud.method.build_mirror_data(ud, ld)

        if not ud.localpath or not os.path.exists(ud.localpath):
            return False

        if ud.localpath == origud.localpath:
            return ud.localpath

        # We may be obtaining a mirror tarball which needs further processing by the real fetcher
        # If that tarball is a local file:// we need to provide a symlink to it
        dldir = ld.getVar("DL_DIR", True)
        if origud.mirrortarball and os.path.basename(ud.localpath) == os.path.basename(origud.mirrortarball) \
                and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
            # Create donestamp in old format to avoid triggering a re-download
            if ud.donestamp:
                bb.utils.mkdirhier(os.path.dirname(ud.donestamp))
                open(ud.donestamp, 'w').close()
            dest = os.path.join(dldir, os.path.basename(ud.localpath))
            if not os.path.exists(dest):
                os.symlink(ud.localpath, dest)
            if not verify_donestamp(origud, ld) or origud.method.need_update(origud, ld):
                origud.method.download(origud, ld)
                if hasattr(origud.method,"build_mirror_data"):
                    origud.method.build_mirror_data(origud, ld)
            return origud.localpath
        # Otherwise the result is a local file:// and we symlink to it
        if not os.path.exists(origud.localpath):
            if os.path.islink(origud.localpath):
                # Broken symbolic link
                os.unlink(origud.localpath)

            os.symlink(ud.localpath, origud.localpath)
        update_stamp(origud, ld)
        return ud.localpath

    except bb.fetch2.NetworkAccess:
        raise

    except bb.fetch2.BBFetchException as e:
        if isinstance(e, ChecksumError):
            logger.warning("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (ud.url, origud.url))
            logger.warning(str(e))
            if os.path.exists(ud.localpath):
                rename_bad_checksum(ud, e.checksum)
        elif isinstance(e, NoChecksumError):
            raise
        else:
            logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
            logger.debug(1, str(e))
        try:
            ud.method.clean(ud, ld)
        except UnboundLocalError:
            pass
        return False
    finally:
        if ud.lockfile and ud.lockfile != origud.lockfile:
            bb.utils.unlockfile(lf)


def try_mirrors(fetch, d, origud, mirrors, check = False):
    """
    Try to use a mirrored version of the sources.
    This method will be automatically called before the fetchers go.

    d is a bb.data instance
    origud is the FetchData for the original uri we're trying to download
    mirrors is the list of mirrors we're going to try
    """
    ld = d.createCopy()

    uris, uds = build_mirroruris(origud, mirrors, ld)

    for index, uri in enumerate(uris):
        ret = try_mirror_url(fetch, origud, uds[index], ld, check)
        if ret != False:
            return ret
    return None

def trusted_network(d, url):
    """
    Return True if the host of the given url may be used for download:
    either networking is disabled, BB_ALLOWED_NETWORKS is unset (globally
    and for the current recipe), or the host matches one of its entries.
    """
    if d.getVar('BB_NO_NETWORK', True) == "1":
        return True

    pkgname = d.expand(d.getVar('PN', False))
    trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False)

    if not trusted_hosts:
        trusted_hosts = d.getVar('BB_ALLOWED_NETWORKS', True)

    # Not enabled.
    if not trusted_hosts:
        return True

    scheme, network, path, user, passwd, param = decodeurl(url)

    if not network:
        return True

    network = network.split(':')[0]
    network = network.lower()

    for host in trusted_hosts.split(" "):
        host = host.lower()
        if host.startswith("*.") and ("." + network).endswith(host[1:]):
            return True
        if host == network:
            return True

    return False

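# Illustrative BB_ALLOWED_NETWORKS matching (hypothetical hosts): "*."
# entries match any subdomain of the named host, other entries match exactly:
#
#   BB_ALLOWED_NETWORKS = "*.example.com downloads.example.org"
#   trusted_network(d, "git://git.example.com/repo.git")  # -> True
#   trusted_network(d, "https://evil.example.net/x")      # -> False
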
def srcrev_internal_helper(ud, d, name):
    """
    Return:
    a) a source revision if specified
    b) latest revision if SRCREV="AUTOINC"
    c) None if not specified
    """

    srcrev = None
    pn = d.getVar("PN", True)
    attempts = []
    if name != '' and pn:
        attempts.append("SRCREV_%s_pn-%s" % (name, pn))
    if name != '':
        attempts.append("SRCREV_%s" % name)
    if pn:
        attempts.append("SRCREV_pn-%s" % pn)
    attempts.append("SRCREV")

    for a in attempts:
        srcrev = d.getVar(a, True)
        if srcrev and srcrev != "INVALID":
            break

    if 'rev' in ud.parm and 'tag' in ud.parm:
        raise FetchError("Please specify a ;rev= parameter or a ;tag= parameter in the url %s but not both." % (ud.url))

    if 'rev' in ud.parm or 'tag' in ud.parm:
        if 'rev' in ud.parm:
            parmrev = ud.parm['rev']
        else:
            parmrev = ud.parm['tag']
        if srcrev == "INVALID" or not srcrev:
            return parmrev
        if srcrev != parmrev:
            raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please specify one valid value" % (srcrev, parmrev))
        return parmrev

    if srcrev == "INVALID" or not srcrev:
        raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (ud.url, str(attempts)), ud.url)
    if srcrev == "AUTOINC":
        srcrev = ud.method.latest_revision(ud, d, name)

    return srcrev

def get_checksum_file_list(d):
    """ Get a list of files to checksum from SRC_URI

    Returns the resolved local paths of all local file entries in
    SRC_URI as a space-separated string
    """
    fetch = Fetch([], d, cache = False, localonly = True)

    dl_dir = d.getVar('DL_DIR', True)
    filelist = []
    for u in fetch.urls:
        ud = fetch.ud[u]

        if ud and isinstance(ud.method, local.Local):
            paths = ud.method.localpaths(ud, d)
            for f in paths:
                pth = ud.decodedurl
                if '*' in pth:
                    f = os.path.join(os.path.abspath(f), pth)
                if f.startswith(dl_dir):
                    # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
                    if os.path.exists(f):
                        bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN', True), os.path.basename(f)))
                    else:
                        bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN', True), os.path.basename(f)))
                filelist.append(f + ":" + str(os.path.exists(f)))

    return " ".join(filelist)

def get_file_checksums(filelist, pn):
    """Get a list of the checksums for a list of local files

    Returns the checksums for a list of local files, caching the results as
    it proceeds

    """
    return _checksum_cache.get_checksums(filelist, pn)


class FetchData(object):
    """
    A class which represents the fetcher state for a given URI.
    """
    def __init__(self, url, d, localonly = False):
        # localpath is the location of a downloaded result. If not set, the file is local.
        self.donestamp = None
        self.needdonestamp = True
        self.localfile = ""
        self.localpath = None
        self.lockfile = None
        self.mirrortarball = None
        self.basename = None
        self.basepath = None
        (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d))
        self.date = self.getSRCDate(d)
        self.url = url
        if not self.user and "user" in self.parm:
            self.user = self.parm["user"]
        if not self.pswd and "pswd" in self.parm:
            self.pswd = self.parm["pswd"]
        self.setup = False

        if "name" in self.parm:
            self.md5_name = "%s.md5sum" % self.parm["name"]
            self.sha256_name = "%s.sha256sum" % self.parm["name"]
        else:
            self.md5_name = "md5sum"
            self.sha256_name = "sha256sum"
        if self.md5_name in self.parm:
            self.md5_expected = self.parm[self.md5_name]
        elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]:
            self.md5_expected = None
        else:
            self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name, True)
        if self.sha256_name in self.parm:
            self.sha256_expected = self.parm[self.sha256_name]
        elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]:
            self.sha256_expected = None
        else:
            self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name, True)
        self.ignore_checksums = False

        self.names = self.parm.get("name",'default').split(',')

        self.method = None
        for m in methods:
            if m.supports(self, d):
                self.method = m
                break

        if not self.method:
            raise NoMethodError(url)

        if localonly and not isinstance(self.method, local.Local):
            raise NonLocalMethod()

        if self.parm.get("proto", None) and "protocol" not in self.parm:
            logger.warning('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN', True))
            self.parm["protocol"] = self.parm.get("proto", None)

        if hasattr(self.method, "urldata_init"):
            self.method.urldata_init(self, d)

        if "localpath" in self.parm:
            # if user sets localpath for file, use it instead.
            self.localpath = self.parm["localpath"]
            self.basename = os.path.basename(self.localpath)
        elif self.localfile:
            self.localpath = self.method.localpath(self, d)

        dldir = d.getVar("DL_DIR", True)

        if not self.needdonestamp:
            return

        # Note: .done and .lock files should always be in DL_DIR whereas localpath may not be.
        if self.localpath and self.localpath.startswith(dldir):
            basepath = self.localpath
        elif self.localpath:
            basepath = dldir + os.sep + os.path.basename(self.localpath)
        elif self.basepath or self.basename:
            basepath = dldir + os.sep + (self.basepath or self.basename)
        else:
            bb.fatal("Can't determine lock path for url %s" % url)

        self.donestamp = basepath + '.done'
        self.lockfile = basepath + '.lock'

    def setup_revisons(self, d):
        self.revisions = {}
        for name in self.names:
            self.revisions[name] = srcrev_internal_helper(self, d, name)

        # add compatibility code for non name specified case
        if len(self.names) == 1:
            self.revision = self.revisions[self.names[0]]

    def setup_localpath(self, d):
        if not self.localpath:
            self.localpath = self.method.localpath(self, d)

    def getSRCDate(self, d):
        """
        Return the SRC Date for the component

        d the bb.data module
        """
        if "srcdate" in self.parm:
            return self.parm['srcdate']

        pn = d.getVar("PN", True)

        if pn:
            return d.getVar("SRCDATE_%s" % pn, True) or d.getVar("SRCDATE", True) or d.getVar("DATE", True)

        return d.getVar("SRCDATE", True) or d.getVar("DATE", True)

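# Typical FetchData lifecycle (illustrative; the URL is hypothetical):
#
#   ud = FetchData("https://example.com/pkg-1.0.tar.gz;name=pkg", d)
#   ud.setup_localpath(d)   # resolves ud.localpath (usually under DL_DIR)
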
class FetchMethod(object):
    """Base class for 'fetch'ing data"""

    def __init__(self, urls=None):
        self.urls = []

    def supports(self, urldata, d):
        """
        Check to see if this fetch class supports a given url.
        """
        return 0

    def localpath(self, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        Can also set up variables in urldata for later use (saving code
        duplication and duplicate code execution)
        """
        return os.path.join(data.getVar("DL_DIR", d, True), urldata.localfile)

    def supports_checksum(self, urldata):
        """
        Is localpath something that can be represented by a checksum?
        """

        # We cannot compute checksums for directories
        if os.path.isdir(urldata.localpath):
            return False
        if urldata.localpath.find("*") != -1:
            return False

        return True

    def recommends_checksum(self, urldata):
        """
        Is this backend one where checksumming is recommended (should warnings
        be displayed if there is no checksum)?
        """
        return False

    def _strip_leading_slashes(self, relpath):
        """
        Remove leading slash as os.path.join can't cope
        """
        while os.path.isabs(relpath):
            relpath = relpath[1:]
        return relpath

    def setUrls(self, urls):
        self.__urls = urls

    def getUrls(self):
        return self.__urls

    urls = property(getUrls, setUrls, None, "Urls property")

    def need_update(self, ud, d):
        """
        Force a fetch, even if localpath exists?
        """
        if os.path.exists(ud.localpath):
            return False
        return True

    def supports_srcrev(self):
        """
        The fetcher supports auto source revisions (SRCREV)
        """
        return False

    def download(self, urldata, d):
        """
        Fetch urls
        Assumes localpath was called first
        """
        raise NoMethodError(urldata.url)

    def unpack(self, urldata, rootdir, data):
        iterate = False
        file = urldata.localpath

        # Localpath can't deal with 'dir/*' entries, so it converts them to '.',
        # but it must be corrected back for local files copying
        if urldata.basename == '*' and file.endswith('/.'):
            file = '%s/%s' % (file.rstrip('/.'), urldata.path)

        try:
            unpack = bb.utils.to_boolean(urldata.parm.get('unpack'), True)
        except ValueError as exc:
            bb.fatal("Invalid value for 'unpack' parameter for %s: %s" %
                     (file, urldata.parm.get('unpack')))

        base, ext = os.path.splitext(file)
        if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz']:
            efile = os.path.join(rootdir, os.path.basename(base))
        else:
            efile = file
        cmd = None

        if unpack:
            if file.endswith('.tar'):
                cmd = 'tar x --no-same-owner -f %s' % file
            elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
                cmd = 'tar xz --no-same-owner -f %s' % file
            elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
                cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
                cmd = 'gzip -dc %s > %s' % (file, efile)
            elif file.endswith('.bz2'):
                cmd = 'bzip2 -dc %s > %s' % (file, efile)
            elif file.endswith('.tar.xz'):
                cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.xz'):
                cmd = 'xz -dc %s > %s' % (file, efile)
            elif file.endswith('.tar.lz'):
                cmd = 'lzip -dc %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.lz'):
                cmd = 'lzip -dc %s > %s' % (file, efile)
            elif file.endswith('.zip') or file.endswith('.jar'):
                try:
                    dos = bb.utils.to_boolean(urldata.parm.get('dos'), False)
                except ValueError as exc:
                    bb.fatal("Invalid value for 'dos' parameter for %s: %s" %
                             (file, urldata.parm.get('dos')))
                cmd = 'unzip -q -o'
                if dos:
                    cmd = '%s -a' % cmd
                cmd = "%s '%s'" % (cmd, file)
            elif file.endswith('.rpm') or file.endswith('.srpm'):
                if 'extract' in urldata.parm:
                    unpack_file = urldata.parm.get('extract')
                    cmd = 'rpm2cpio.sh %s | cpio -id %s' % (file, unpack_file)
                    iterate = True
                    iterate_file = unpack_file
                else:
                    cmd = 'rpm2cpio.sh %s | cpio -id' % (file)
            elif file.endswith('.deb') or file.endswith('.ipk'):
                output = subprocess.check_output('ar -t %s' % file, preexec_fn=subprocess_setup, shell=True)
                datafile = None
                if output:
                    for line in output.decode().splitlines():
                        if line.startswith('data.tar.'):
                            datafile = line
                            break
                    else:
                        raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar.* file", urldata.url)
                else:
                    raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url)
                cmd = 'ar x %s %s && tar --no-same-owner -xpf %s && rm %s' % (file, datafile, datafile, datafile)
            elif file.endswith('.tar.7z'):
                cmd = '7z x -so %s | tar xf - ' % file
            elif file.endswith('.7z'):
                cmd = '7za x -y %s 1>/dev/null' % file

        # If 'subdir' param exists, create a dir and use it as destination for unpack cmd
        if 'subdir' in urldata.parm:
            subdir = urldata.parm.get('subdir')
            if os.path.isabs(subdir):
                if not os.path.realpath(subdir).startswith(os.path.realpath(rootdir)):
                    raise UnpackError("subdir argument isn't a subdirectory of unpack root %s" % rootdir, urldata.url)
                unpackdir = subdir
            else:
                unpackdir = os.path.join(rootdir, subdir)
            bb.utils.mkdirhier(unpackdir)
        else:
            unpackdir = rootdir

        if not unpack or not cmd:
            # If file == dest, then avoid any copies, as we already put the file into dest!
            dest = os.path.join(unpackdir, os.path.basename(file))
            if file != dest and not (os.path.exists(dest) and os.path.samefile(file, dest)):
                destdir = '.'
                # For file:// entries all intermediate dirs in path must be created at destination
                if urldata.type == "file":
                    # A trailing '/' would cause copying to the wrong place
                    urlpath = urldata.path.rstrip('/')
                    # Want files placed relative to cwd so no leading '/'
                    urlpath = urlpath.lstrip('/')
                    if urlpath.find("/") != -1:
                        destdir = urlpath.rsplit("/", 1)[0] + '/'
                        bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir))
                cmd = 'cp -fpPRH %s %s' % (file, destdir)

        if not cmd:
            return

        path = data.getVar('PATH', True)
        if path:
            cmd = "PATH=\"%s\" %s" % (path, cmd)
        bb.note("Unpacking %s to %s/" % (file, unpackdir))
        ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True, cwd=unpackdir)

        if ret != 0:
            raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url)

        if iterate is True:
            iterate_urldata = urldata
            iterate_urldata.localpath = "%s/%s" % (rootdir, iterate_file)
            self.unpack(urldata, rootdir, data)

        return

1469 def clean(self, urldata, d):
1470 """
1471 Clean any existing full or partial download
1472 """
1473 bb.utils.remove(urldata.localpath)
1474
1475 def try_premirror(self, urldata, d):
1476 """
1477 Should premirrors be used?
1478 """
1479 return True
1480
1481 def checkstatus(self, fetch, urldata, d):
1482 """
1483 Check the status of a URL
1484 Assumes localpath was called first
1485 """
1486 logger.info("URL %s could not be checked for status since no method exists.", url)
1487 return True
1488
    def latest_revision(self, ud, d, name):
        """
        Look in the cache for the latest revision, if not present ask the SCM.
        """
        if not hasattr(self, "_latest_revision"):
            raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url)

        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
        key = self.generate_revision_key(ud, d, name)
        try:
            return revs[key]
        except KeyError:
            revs[key] = rev = self._latest_revision(ud, d, name)
            return rev

    def sortable_revision(self, ud, d, name):
        latest_rev = self._build_revision(ud, d, name)
        return True, str(latest_rev)

    def generate_revision_key(self, ud, d, name):
        key = self._revision_key(ud, d, name)
        return "%s-%s" % (key, d.getVar("PN", True) or "")
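
    # Sketch of the contract latest_revision() relies on: a concrete SCM
    # fetcher provides _revision_key() and _latest_revision(). The class and
    # helper below are illustrative only; see the git fetcher for a real
    # implementation.
    #
    #   class ExampleSCM(FetchMethod):
    #       def _revision_key(self, ud, d, name):
    #           return "example:" + ud.host + ud.path
    #       def _latest_revision(self, ud, d, name):
    #           # ask the remote server for its current tip revision;
    #           # query_tip is a hypothetical helper, not a BitBake API
    #           return query_tip(ud)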

class Fetch(object):
    def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None):
        if localonly and cache:
            raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")

        if len(urls) == 0:
            urls = d.getVar("SRC_URI", True).split()
        self.urls = urls
        self.d = d
        self.ud = {}
        self.connection_cache = connection_cache

        fn = d.getVar('FILE', True)
        mc = d.getVar('__BBMULTICONFIG', True) or ""
        if cache and fn and mc + fn in urldata_cache:
            self.ud = urldata_cache[mc + fn]

        for url in urls:
            if url not in self.ud:
                try:
                    self.ud[url] = FetchData(url, d, localonly)
                except NonLocalMethod:
                    if localonly:
                        self.ud[url] = None

        if fn and cache:
            urldata_cache[mc + fn] = self.ud

    def localpath(self, url):
        if url not in self.urls:
            self.ud[url] = FetchData(url, self.d)

        self.ud[url].setup_localpath(self.d)
        return self.d.expand(self.ud[url].localpath)

    def localpaths(self):
        """
        Return a list of the local filenames, assuming successful fetch
        """
        local = []

        for u in self.urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)
            local.append(ud.localpath)

        return local

    def download(self, urls=None):
        """
        Fetch all urls
        """
        if not urls:
            urls = self.urls

        network = self.d.getVar("BB_NO_NETWORK", True)
        premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY", True) == "1")

        for u in urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)
            m = ud.method
            localpath = ""

            if ud.lockfile:
                lf = bb.utils.lockfile(ud.lockfile)

            try:
                self.d.setVar("BB_NO_NETWORK", network)

                if verify_donestamp(ud, self.d) and not m.need_update(ud, self.d):
                    localpath = ud.localpath
                elif m.try_premirror(ud, self.d):
                    logger.debug(1, "Trying PREMIRRORS")
                    mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
                    localpath = try_mirrors(self, self.d, ud, mirrors, False)

                if premirroronly:
                    self.d.setVar("BB_NO_NETWORK", "1")

                firsterr = None
                verified_stamp = verify_donestamp(ud, self.d)
                if not localpath and (not verified_stamp or m.need_update(ud, self.d)):
                    try:
                        if not trusted_network(self.d, ud.url):
                            raise UntrustedUrl(ud.url)
                        logger.debug(1, "Trying Upstream")
                        m.download(ud, self.d)
                        if hasattr(m, "build_mirror_data"):
                            m.build_mirror_data(ud, self.d)
                        localpath = ud.localpath
                        # Verify checksums early so that on a mismatch the
                        # fetcher still has a chance to fetch from a mirror.
                        update_stamp(ud, self.d)

                    except bb.fetch2.NetworkAccess:
                        raise

                    except BBFetchException as e:
                        if isinstance(e, ChecksumError):
                            logger.warning("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
                            logger.debug(1, str(e))
                            if os.path.exists(ud.localpath):
                                rename_bad_checksum(ud, e.checksum)
                        elif isinstance(e, NoChecksumError):
                            raise
                        else:
                            logger.warning('Failed to fetch URL %s, attempting MIRRORS if available' % u)
                            logger.debug(1, str(e))
                        firsterr = e
                        # Remove any incomplete fetch
                        if not verified_stamp:
                            m.clean(ud, self.d)
                        logger.debug(1, "Trying MIRRORS")
                        mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
                        localpath = try_mirrors(self, self.d, ud, mirrors)

                if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1):
                    if firsterr:
                        logger.error(str(firsterr))
                    raise FetchError("Unable to fetch URL from any source.", u)

                update_stamp(ud, self.d)

            except BBFetchException as e:
                if isinstance(e, ChecksumError):
                    logger.error("Checksum failure fetching %s" % u)
                raise

            finally:
                if ud.lockfile:
                    bb.utils.unlockfile(lf)

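    # For reference, PREMIRRORS and MIRRORS hold whitespace-separated pairs
    # of a URI-matching regex and a replacement prefix; the value below is
    # illustrative, not a shipped default:
    #
    #   PREMIRRORS = "git://.*/.*   http://mirror.example.com/sources/ \n \
    #                 https://.*/.* http://mirror.example.com/sources/ \n"
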
    def checkstatus(self, urls=None):
        """
        Check that all urls exist upstream
        """

        if not urls:
            urls = self.urls

        for u in urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)
            m = ud.method
            logger.debug(1, "Testing URL %s", u)
            # First try checking uri, u, from PREMIRRORS
            mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
            ret = try_mirrors(self, self.d, ud, mirrors, True)
            if not ret:
                # Next try checking from the original uri, u
                try:
                    ret = m.checkstatus(self, ud, self.d)
                except:
                    # Finally, try checking uri, u, from MIRRORS
                    mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
                    ret = try_mirrors(self, self.d, ud, mirrors, True)

            if not ret:
                raise FetchError("URL %s doesn't work" % u, u)

    def unpack(self, root, urls=None):
        """
        Unpack all urls into the root directory
        """

        if not urls:
            urls = self.urls

        for u in urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)

            if ud.lockfile:
                lf = bb.utils.lockfile(ud.lockfile)

            ud.method.unpack(ud, root, self.d)

            if ud.lockfile:
                bb.utils.unlockfile(lf)

    def clean(self, urls=None):
        """
        Clean files that the fetcher gets or places
        """

        if not urls:
            urls = self.urls

        for url in urls:
            if url not in self.ud:
                self.ud[url] = FetchData(url, self.d)
            ud = self.ud[url]
            ud.setup_localpath(self.d)

            if not ud.localfile and ud.localpath is None:
                continue

            if ud.lockfile:
                lf = bb.utils.lockfile(ud.lockfile)

            ud.method.clean(ud, self.d)
            if ud.donestamp:
                bb.utils.remove(ud.donestamp)

            if ud.lockfile:
                bb.utils.unlockfile(lf)

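# Illustrative use of the Fetch API above (a sketch assuming a populated
# datastore 'd' whose SRC_URI was set by a recipe):
#
#   fetcher = Fetch([], d)
#   fetcher.download()                          # fetch every SRC_URI entry
#   fetcher.unpack(d.getVar("WORKDIR", True))   # unpack into the workdir
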
class FetchConnectionCache(object):
    """
    A class which acts as a container for socket connections.
    """
    def __init__(self):
        self.cache = {}

    def get_connection_name(self, host, port):
        return host + ':' + str(port)

    def add_connection(self, host, port, connection):
        cn = self.get_connection_name(host, port)

        if cn not in self.cache:
            self.cache[cn] = connection

    def get_connection(self, host, port):
        connection = None

        cn = self.get_connection_name(host, port)
        if cn in self.cache:
            connection = self.cache[cn]

        return connection

    def remove_connection(self, host, port):
        cn = self.get_connection_name(host, port)
        if cn in self.cache:
            self.cache[cn].close()
            del self.cache[cn]

    def close_connections(self):
        for cn in list(self.cache.keys()):
            self.cache[cn].close()
            del self.cache[cn]

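# Minimal usage sketch for FetchConnectionCache (hypothetical host, port and
# open_connection() helper; the wget fetcher is a real consumer):
#
#   cc = FetchConnectionCache()
#   conn = cc.get_connection("example.com", 443)
#   if conn is None:
#       conn = open_connection("example.com", 443)   # hypothetical helper
#       cc.add_connection("example.com", 443, conn)
#   ...
#   cc.close_connections()
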
from . import cvs
from . import git
from . import gitsm
from . import gitannex
from . import local
from . import svn
from . import wget
from . import ssh
from . import sftp
from . import perforce
from . import bzr
from . import hg
from . import osc
from . import repo
from . import clearcase
from . import npm

methods.append(local.Local())
methods.append(wget.Wget())
methods.append(svn.Svn())
methods.append(git.Git())
methods.append(gitsm.GitSM())
methods.append(gitannex.GitANNEX())
methods.append(cvs.Cvs())
methods.append(ssh.SSH())
methods.append(sftp.SFTP())
methods.append(perforce.Perforce())
methods.append(bzr.Bzr())
methods.append(hg.Hg())
methods.append(osc.Osc())
methods.append(repo.Repo())
methods.append(clearcase.ClearCase())
methods.append(npm.Npm())
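
# Note: URL resolution walks this list in order and uses the first method
# whose supports() accepts the URL.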