"""
BitBake 'Fetch' implementations

Classes for obtaining upstream sources for the
BitBake build tools.

"""

# Copyright (C) 2003, 2004 Chris Larson
#
# SPDX-License-Identifier: GPL-2.0-only
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig

import shlex
import re
import tempfile
import os
import errno
import bb
import bb.progress
import socket
import http.client
import urllib.request, urllib.parse, urllib.error
from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import logger
from bb.fetch2 import runfetchcmd
from bb.utils import export_proxies
from bs4 import BeautifulSoup
from bs4 import SoupStrainer

class WgetProgressHandler(bb.progress.LineFilterProgressHandler):
    """
    Extract progress information from wget output.
    Note: relies on --progress=dot (with -v or without -q/-nv) being
    specified on the wget command line.
    """
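    # With --progress=dot, wget emits lines such as:
    #   "  1900K .......... .......... 47% 1.27M 2s"
    # writeline() below picks the last "<percent>% <rate>" pair out of each line.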
    def __init__(self, d):
        super(WgetProgressHandler, self).__init__(d)
        # Send an initial progress event so the bar gets shown
        self._fire_progress(0)

    def writeline(self, line):
        percs = re.findall(r'(\d+)%\s+([\d.]+[A-Z])', line)
        if percs:
            progress = int(percs[-1][0])
            rate = percs[-1][1] + '/s'
            self.update(progress, rate)
            return False
        return True


class Wget(FetchMethod):
    """Class to fetch urls via 'wget'"""

    # CDNs like CloudFlare may do a 'browser integrity test' which can fail
    # with the standard wget/urllib User-Agent, so pretend to be a modern
    # browser.
    user_agent = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:84.0) Gecko/20100101 Firefox/84.0"

    def check_certs(self, d):
        """
        Should certificates be checked?
        """
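        # BB_CHECK_SSL_CERTS defaults to enabled; setting it to "0" (e.g. in
        # local.conf) disables certificate verification.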
        return (d.getVar("BB_CHECK_SSL_CERTS") or "1") != "0"

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with wget.
        """
        return ud.type in ['http', 'https', 'ftp']

    def recommends_checksum(self, urldata):
        return True

    def urldata_init(self, ud, d):
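        # A typical URL handled here (hypothetical recipe snippet):
        #   SRC_URI = "https://example.com/foo-1.0.tar.gz;downloadfilename=foo-src.tar.gz"
        # where the optional 'downloadfilename' parameter overrides the local
        # file name derived from the URL path.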
        if 'protocol' in ud.parm:
            if ud.parm['protocol'] == 'git':
                raise bb.fetch2.ParameterError("Invalid protocol - if you wish to fetch from a git repository using http, you need to instead use the git:// prefix with protocol=http", ud.url)

        if 'downloadfilename' in ud.parm:
            ud.basename = ud.parm['downloadfilename']
        else:
            ud.basename = os.path.basename(ud.path)

        ud.localfile = d.expand(urllib.parse.unquote(ud.basename))
        if not ud.localfile:
            ud.localfile = d.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", "."))

        self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 30 --passive-ftp"

        if not self.check_certs(d):
            self.basecmd += " --no-check-certificate"

    def _runwget(self, ud, d, command, quiet, workdir=None):

        progresshandler = WgetProgressHandler(d)

        logger.debug2("Fetching %s using command '%s'" % (ud.url, command))
        bb.fetch2.check_network_access(d, command, ud.url)
        runfetchcmd(command + ' --progress=dot -v', d, quiet, log=progresshandler, workdir=workdir)

    def download(self, ud, d):
        """Fetch urls"""

        fetchcmd = self.basecmd

        if 'downloadfilename' in ud.parm:
            localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile)
            bb.utils.mkdirhier(os.path.dirname(localpath))
            fetchcmd += " -O %s" % shlex.quote(localpath)

        if ud.user and ud.pswd:
            fetchcmd += " --user=%s --password=%s --auth-no-challenge" % (ud.user, ud.pswd)

        uri = ud.url.split(";")[0]
        if os.path.exists(ud.localpath):
            # The file already exists but was not fully downloaded; resume it.
            fetchcmd += d.expand(" -c -P ${DL_DIR} '%s'" % uri)
        else:
            fetchcmd += d.expand(" -P ${DL_DIR} '%s'" % uri)

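        # At this point, with no auth or downloadfilename involved, fetchcmd
        # looks roughly like:
        #   /usr/bin/env wget -t 2 -T 30 --passive-ftp -P ${DL_DIR} '<uri>'
        # _runwget() then appends ' --progress=dot -v' before executing it.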
        self._runwget(ud, d, fetchcmd, False)

        # Sanity check since wget can pretend it succeeded when it didn't.
        # Also, this used to happen if SourceForge sent us to the mirror page.
        if not os.path.exists(ud.localpath):
            raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)

        if os.path.getsize(ud.localpath) == 0:
            os.remove(ud.localpath)
            raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (uri), uri)

        return True

    def checkstatus(self, fetch, ud, d, try_again=True):
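        """
        Check whether a URL is reachable by issuing a HEAD request (without
        downloading the file); returns True on success, False otherwise.
        """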
        class HTTPConnectionCache(http.client.HTTPConnection):
            if fetch.connection_cache:
                def connect(self):
                    """Connect to the host and port specified in __init__."""

                    sock = fetch.connection_cache.get_connection(self.host, self.port)
                    if sock:
                        self.sock = sock
                    else:
                        self.sock = socket.create_connection((self.host, self.port),
                                    self.timeout, self.source_address)
                        fetch.connection_cache.add_connection(self.host, self.port, self.sock)

                    if self._tunnel_host:
                        self._tunnel()

        class CacheHTTPHandler(urllib.request.HTTPHandler):
            def http_open(self, req):
                return self.do_open(HTTPConnectionCache, req)

            def do_open(self, http_class, req):
                """Return an addinfourl object for the request, using http_class.

                http_class must implement the HTTPConnection API from httplib.
                The addinfourl return value is a file-like object. It also
                has methods and attributes including:
                    - info(): return a mimetools.Message object for the headers
                    - geturl(): return the original request URL
                    - code: HTTP status code
                """
                host = req.host
                if not host:
                    raise urllib.error.URLError('no host given')

                h = http_class(host, timeout=req.timeout) # will parse host:port
                h.set_debuglevel(self._debuglevel)

                headers = dict(req.unredirected_hdrs)
                headers.update(dict((k, v) for k, v in list(req.headers.items())
                            if k not in headers))

                # We want to make an HTTP/1.1 request, but the addinfourl
                # class isn't prepared to deal with a persistent connection.
                # It will try to read all remaining data from the socket,
                # which will block while the server waits for the next request.
                # So make sure the connection gets closed after the (only)
                # request.

                # Don't close connection when connection_cache is enabled,
                if fetch.connection_cache is None:
                    headers["Connection"] = "close"
                else:
                    headers["Connection"] = "Keep-Alive" # Works for HTTP/1.0

                headers = dict(
                    (name.title(), val) for name, val in list(headers.items()))

                if req._tunnel_host:
                    tunnel_headers = {}
                    proxy_auth_hdr = "Proxy-Authorization"
                    if proxy_auth_hdr in headers:
                        tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
                        # Proxy-Authorization should not be sent to origin
                        # server.
                        del headers[proxy_auth_hdr]
                    h.set_tunnel(req._tunnel_host, headers=tunnel_headers)

                try:
                    h.request(req.get_method(), req.selector, req.data, headers)
                except socket.error as err: # XXX what error?
                    # Don't close connection when cache is enabled.
                    # Instead, try to detect connections that are no longer
                    # usable (for example, closed unexpectedly) and remove
                    # them from the cache.
                    if fetch.connection_cache is None:
                        h.close()
                    elif isinstance(err, OSError) and err.errno == errno.EBADF:
                        # This happens when the server closes the connection despite the Keep-Alive.
                        # Apparently urllib then uses the file descriptor, expecting it to be
                        # connected, when in reality the connection is already gone.
                        # We let the request fail and expect it to be
                        # tried once more ("try_again" in checkstatus()),
                        # with the dead connection removed from the cache.
                        # If it still fails, we give up, which can happen with
                        # bad HTTP proxy settings.
                        fetch.connection_cache.remove_connection(h.host, h.port)
                    raise urllib.error.URLError(err)
                else:
                    r = h.getresponse()

                # Pick apart the HTTPResponse object to get the addinfourl
                # object initialized properly.

                # Wrap the HTTPResponse object in socket's file object adapter
                # for Windows. That adapter calls recv(), so delegate recv()
                # to read(). This weird wrapping allows the returned object to
                # have readline() and readlines() methods.

                # XXX It might be better to extract the read buffering code
                # out of socket._fileobject() and into a base class.
                r.recv = r.read

                # no data, just have to read
                r.read()
                class fp_dummy(object):
                    def read(self):
                        return ""
                    def readline(self):
                        return ""
                    def close(self):
                        pass
                    closed = False

                resp = urllib.response.addinfourl(fp_dummy(), r.msg, req.get_full_url())
                resp.code = r.status
                resp.msg = r.reason

                # Close the connection when the server requests it.
                if fetch.connection_cache is not None:
                    if 'Connection' in r.msg and r.msg['Connection'] == 'close':
                        fetch.connection_cache.remove_connection(h.host, h.port)

                return resp

        class HTTPMethodFallback(urllib.request.BaseHandler):
            """
            Fallback to GET if HEAD is not allowed (405 HTTP error)
            """
            def http_error_405(self, req, fp, code, msg, headers):
                fp.read()
                fp.close()

                if req.get_method() != 'GET':
                    newheaders = dict((k, v) for k, v in list(req.headers.items())
                                      if k.lower() not in ("content-length", "content-type"))
                    return self.parent.open(urllib.request.Request(req.get_full_url(),
                                                                   headers=newheaders,
                                                                   origin_req_host=req.origin_req_host,
                                                                   unverifiable=True))

                raise urllib.request.HTTPError(req, code, msg, headers, None)

            # Some servers (e.g. GitHub archives, hosted on Amazon S3) return 403
            # Forbidden when they actually mean 405 Method Not Allowed.
            http_error_403 = http_error_405


        class FixedHTTPRedirectHandler(urllib.request.HTTPRedirectHandler):
            """
            urllib2.HTTPRedirectHandler resets the method to GET on redirect,
            but we want to follow redirects using the original method.
            """
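            # e.g. a HEAD request that receives a 301/302 is retried as HEAD
            # at the new URL rather than being silently downgraded to GET.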
            def redirect_request(self, req, fp, code, msg, headers, newurl):
                newreq = urllib.request.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl)
                newreq.get_method = req.get_method
                return newreq

        # We need to update the environment here as both the proxy and HTTPS
        # handlers need variables set. The proxy needs http_proxy and friends to
        # be set, and HTTPSHandler ends up calling into openssl to load the
        # certificates. In buildtools configurations this will be looking at the
        # wrong place for certificates by default: we set SSL_CERT_FILE to the
        # right location in the buildtools environment script but as BitBake
        # prunes the environment this is lost. When binaries are executed
        # runfetchcmd ensures these values are in the environment, but this is
        # pure Python so we need to update the environment.
        #
        # Avoid trampling the environment too much by using bb.utils.environment
        # to scope the changes to the build_opener request, which is when the
        # environment lookups happen.
        newenv = {}
        for name in bb.fetch2.FETCH_EXPORT_VARS:
            value = d.getVar(name)
            if not value:
                origenv = d.getVar("BB_ORIGENV")
                if origenv:
                    value = origenv.getVar(name)
            if value:
                newenv[name] = value

        with bb.utils.environment(**newenv):
            import ssl

            if self.check_certs(d):
                context = ssl.create_default_context()
            else:
                context = ssl._create_unverified_context()

            handlers = [FixedHTTPRedirectHandler,
                        HTTPMethodFallback,
                        urllib.request.ProxyHandler(),
                        CacheHTTPHandler(),
                        urllib.request.HTTPSHandler(context=context)]
            opener = urllib.request.build_opener(*handlers)

            try:
                uri = ud.url.split(";")[0]
                r = urllib.request.Request(uri)
                r.get_method = lambda: "HEAD"
                # Some servers (FusionForge, as used on Alioth) require that the
                # optional Accept header is set.
                r.add_header("Accept", "*/*")
                r.add_header("User-Agent", self.user_agent)
                def add_basic_auth(login_str, request):
                    '''Adds Basic auth to http request, pass in login:password as string'''
                    import base64
                    encodeuser = base64.b64encode(login_str.encode('utf-8')).decode("utf-8")
                    authheader = "Basic %s" % encodeuser
                    # Use the 'request' argument rather than closing over 'r'
                    request.add_header("Authorization", authheader)

                if ud.user and ud.pswd:
                    add_basic_auth(ud.user + ':' + ud.pswd, r)

                try:
                    import netrc
                    n = netrc.netrc()
                    login, unused, password = n.authenticators(urllib.parse.urlparse(uri).hostname)
                    add_basic_auth("%s:%s" % (login, password), r)
                except (TypeError, ImportError, IOError, netrc.NetrcParseError):
                    pass

                with opener.open(r) as response:
                    pass
            except urllib.error.URLError as e:
                if try_again:
                    logger.debug2("checkstatus: trying again")
                    return self.checkstatus(fetch, ud, d, False)
                else:
                    # debug for now to avoid spamming the logs in e.g. remote sstate searches
                    logger.debug2("checkstatus() urlopen failed: %s" % e)
                    return False
            except ConnectionResetError as e:
                if try_again:
                    logger.debug2("checkstatus: trying again")
                    return self.checkstatus(fetch, ud, d, False)
                else:
                    # debug for now to avoid spamming the logs in e.g. remote sstate searches
                    logger.debug2("checkstatus() urlopen failed: %s" % e)
                    return False

        return True

    def _parse_path(self, regex, s):
        """
        Find and group name, version and archive type in the given string s
        """

        m = regex.search(s)
        if m:
            pname = ''
            pver = ''
            ptype = ''

            mdict = m.groupdict()
            if 'name' in mdict.keys():
                pname = mdict['name']
            if 'pver' in mdict.keys():
                pver = mdict['pver']
            if 'type' in mdict.keys():
                ptype = mdict['type']

            bb.debug(3, "_parse_path: %s, %s, %s" % (pname, pver, ptype))

            return (pname, pver, ptype)

        return None

    def _modelate_version(self, version):
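        # Normalize a version string so bb.utils.vercmp() can compare it.
        # Illustrative examples of the transformation:
        #   "v2.5"   -> "2.5"
        #   "1.0rc2" -> "1.0.1000.2"   (so rc > beta > alpha when sorting)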
        if version[0] in ['.', '-']:
            if version[1].isdigit():
                version = version[1] + version[0] + version[2:len(version)]
            else:
                version = version[1:len(version)]

        version = re.sub('-', '.', version)
        version = re.sub('_', '.', version)
        version = re.sub('(rc)+', '.1000.', version)
        version = re.sub('(beta)+', '.100.', version)
        version = re.sub('(alpha)+', '.10.', version)
        if version[0] == 'v':
            version = version[1:len(version)]
        return version

    def _vercmp(self, old, new):
        """
        Check whether 'new' is newer than 'old'. We use the existing vercmp()
        for this. PE is cleared in the comparison as it is not part of the
        upstream version, and PR is cleared too for simplicity, as it is
        difficult to extract from the various upstream formats.
        """

        (oldpn, oldpv, oldsuffix) = old
        (newpn, newpv, newsuffix) = new

        # Check for a new suffix type that we have never heard of before
        if newsuffix:
            m = self.suffix_regex_comp.search(newsuffix)
            if not m:
                bb.warn("%s has a possible unknown suffix: %s" % (newpn, newsuffix))
                return False

        # Not our package so ignore it
        if oldpn != newpn:
            return False

        oldpv = self._modelate_version(oldpv)
        newpv = self._modelate_version(newpv)

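        # bb.utils.vercmp() follows cmp() conventions: negative when 'old' is
        # older than 'new', zero when equal, positive when newer. PE and PR
        # are pinned to dummy values so only PV is compared.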
        return bb.utils.vercmp(("0", oldpv, ""), ("0", newpv, ""))

    def _fetch_index(self, uri, ud, d):
        """
        Run fetch checkstatus to get directory information
        """
        with tempfile.TemporaryDirectory(prefix="wget-index-") as workdir, tempfile.NamedTemporaryFile(dir=workdir, prefix="wget-listing-") as f:
            fetchcmd = self.basecmd
            fetchcmd += " -O " + f.name + " --user-agent='" + self.user_agent + "' '" + uri + "'"
            try:
                self._runwget(ud, d, fetchcmd, True, workdir=workdir)
                fetchresult = f.read()
            except bb.fetch2.BBFetchException:
                fetchresult = ""

        return fetchresult

    def _check_latest_version(self, url, package, package_regex, current_version, ud, d):
        """
        Return the latest version of a package found in a given directory path.
        On error, or if no version is found, return "".
        """
        valid = 0
        version = ['', '', '']

        bb.debug(3, "VersionURL: %s" % (url))
        soup = BeautifulSoup(self._fetch_index(url, ud, d), "html.parser", parse_only=SoupStrainer("a"))
        if not soup:
            bb.debug(3, "*** %s NO SOUP" % (url))
            return ""

        for line in soup.find_all('a', href=True):
            bb.debug(3, "line['href'] = '%s'" % (line['href']))
            bb.debug(3, "line = '%s'" % (str(line)))

            newver = self._parse_path(package_regex, line['href'])
            if not newver:
                newver = self._parse_path(package_regex, str(line))

            if newver:
                bb.debug(3, "Upstream version found: %s" % newver[1])
                if valid == 0:
                    version = newver
                    valid = 1
                elif self._vercmp(version, newver) < 0:
                    version = newver

        pupver = re.sub('_', '.', version[1])

        bb.debug(3, "*** %s -> UpstreamVersion = %s (CurrentVersion = %s)" %
                (package, pupver or "N/A", current_version[1]))

        if valid:
            return pupver

        return ""

    def _check_latest_version_by_dir(self, dirver, package, package_regex, current_version, ud, d):
        """
        Scan every directory in order to get upstream version.
        """
        version_dir = ['', '', '']
        version = ['', '', '']

        dirver_regex = re.compile(r"(?P<pfx>\D*)(?P<ver>(\d+[\.\-_])*(\d+))")
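        # e.g. dirver "v2.5" yields pfx="v", ver="2.5"; a bare "1.0" yields
        # pfx="", ver="1.0".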
        s = dirver_regex.search(dirver)
        if s:
            version_dir[1] = s.group('ver')
        else:
            version_dir[1] = dirver

        dirs_uri = bb.fetch.encodeurl([ud.type, ud.host,
                ud.path.split(dirver)[0], ud.user, ud.pswd, {}])
        bb.debug(3, "DirURL: %s, %s" % (dirs_uri, package))

        soup = BeautifulSoup(self._fetch_index(dirs_uri, ud, d), "html.parser", parse_only=SoupStrainer("a"))
        if not soup:
            return version[1]

        for line in soup.find_all('a', href=True):
            s = dirver_regex.search(line['href'].strip("/"))
            if s:
                sver = s.group('ver')

                # When the prefix is part of the version directory we need to
                # make sure that only the version directory is used, so strip
                # off any preceding directories.
                #
                # Example: for pfx = '/dir1/dir2/v' and version = '2.5' the
                # expected result is 'v2.5'.
                spfx = s.group('pfx').split('/')[-1]

                version_dir_new = ['', sver, '']
                if self._vercmp(version_dir, version_dir_new) <= 0:
                    dirver_new = spfx + sver
                    path = ud.path.replace(dirver, dirver_new, True) \
                        .split(package)[0]
                    uri = bb.fetch.encodeurl([ud.type, ud.host, path,
                        ud.user, ud.pswd, {}])

                    pupver = self._check_latest_version(uri,
                            package, package_regex, current_version, ud, d)
                    if pupver:
                        version[1] = pupver

                    version_dir = version_dir_new

        return version[1]

    def _init_regexes(self, package, ud, d):
        """
        Match as many patterns as possible such as:
            gnome-common-2.20.0.tar.gz (most common format)
            gtk+-2.90.1.tar.gz
            xf86-input-synaptics-12.6.9.tar.gz
            dri2proto-2.3.tar.gz
            blktool_4.orig.tar.gz
            libid3tag-0.15.1b.tar.gz
            unzip552.tar.gz
            icu4c-3_6-src.tgz
            genext2fs_1.3.orig.tar.gz
            gst-fluendo-mp3
        """
        # match most patterns which use "-" as the separator before the version digits
        pn_prefix1 = r"[a-zA-Z][a-zA-Z0-9]*([-_][a-zA-Z]\w+)*\+?[-_]"
        # a loose pattern such as for unzip552.tar.gz
        pn_prefix2 = r"[a-zA-Z]+"
        # a loose pattern such as for 80325-quicky-0.4.tar.gz
        pn_prefix3 = r"[0-9]+[-]?[a-zA-Z]+"
        # Save the Package Name (pn) Regex for use later
        pn_regex = r"(%s|%s|%s)" % (pn_prefix1, pn_prefix2, pn_prefix3)

        # match version
        pver_regex = r"(([A-Z]*\d+[a-zA-Z]*[\.\-_]*)+)"

        # match arch
        parch_regex = "-source|_all_"

        # The src.rpm extension was added only for rpm packages. It can be
        # removed if rpm packages are always considered as requiring a manual
        # upgrade.
        psuffix_regex = r"(tar\.gz|tgz|tar\.bz2|zip|xz|tar\.lz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)"

        # match name, version and archive type of a package
        package_regex_comp = re.compile(r"(?P<name>%s?\.?v?)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s$)"
                                        % (pn_regex, pver_regex, parch_regex, psuffix_regex))
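        # e.g. "gnome-common-2.20.0.tar.gz" parses roughly as
        # name="gnome-common-", pver="2.20.0", type="tar.gz" (the exact groups
        # depend on which prefix alternative matches).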
        self.suffix_regex_comp = re.compile(psuffix_regex)

        # compile the regex; it can be specific to the package or the generic one
        pn_regex = d.getVar('UPSTREAM_CHECK_REGEX')
        if pn_regex:
            package_custom_regex_comp = re.compile(pn_regex)
        else:
            version = self._parse_path(package_regex_comp, package)
            if version:
                package_custom_regex_comp = re.compile(
                    r"(?P<name>%s)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s)" %
                    (re.escape(version[0]), pver_regex, parch_regex, psuffix_regex))
            else:
                package_custom_regex_comp = None

        return package_custom_regex_comp

    def latest_versionstring(self, ud, d):
        """
        Manipulate the URL and try to obtain the latest package version

        Sanity check to ensure the same name and type.
        """
        package = ud.path.split("/")[-1]
        current_version = ['', d.getVar('PV'), '']

        # possible to have no version in pkg name, such as spectrum-fw
        if not re.search(r"\d+", package):
            current_version[1] = re.sub('_', '.', current_version[1])
            current_version[1] = re.sub('-', '.', current_version[1])
            return (current_version[1], '')

        package_regex = self._init_regexes(package, ud, d)
        if package_regex is None:
            bb.warn("latest_versionstring: package %s doesn't match pattern" % (package))
            return ('', '')
        bb.debug(3, "latest_versionstring, regex: %s" % (package_regex.pattern))

        uri = ""
        regex_uri = d.getVar("UPSTREAM_CHECK_URI")
        if not regex_uri:
            path = ud.path.split(package)[0]

            # search for version matches in folders inside the path, like:
            # "5.7" in http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
            dirver_regex = re.compile(r"(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
            m = dirver_regex.search(path)
            if m:
                pn = d.getVar('PN')
                dirver = m.group('dirver')

                dirver_pn_regex = re.compile(r"%s\d?" % (re.escape(pn)))
                if not dirver_pn_regex.search(dirver):
                    return (self._check_latest_version_by_dir(dirver,
                        package, package_regex, current_version, ud, d), '')

            uri = bb.fetch.encodeurl([ud.type, ud.host, path, ud.user, ud.pswd, {}])
        else:
            uri = regex_uri

        return (self._check_latest_version(uri, package, package_regex,
                current_version, ud, d), '')