"""
BitBake 'Fetch' Azure Storage implementation

"""

# Copyright (C) 2021 Alejandro Hernandez Samaniego
#
# Based on bb.fetch2.wget:
# Copyright (C) 2003, 2004 Chris Larson
#
# SPDX-License-Identifier: GPL-2.0-only
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig

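# Typical usage (illustrative; the account, container and file names below are
# placeholders, not values required by this fetcher):
#
#   SRC_URI = "az://<account>.blob.core.windows.net/<container>/<path-to-file>"
#
# An optional Shared Access Signature can be supplied through the AZ_SAS variable;
# it is appended verbatim to the URL when checking status and downloading.
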
import shlex
import os
import bb
from bb.fetch2 import FetchError
from bb.fetch2 import logger
from bb.fetch2.wget import Wget


class Az(Wget):

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched from Azure Storage
        """
        return ud.type in ['az']

    def checkstatus(self, fetch, ud, d, try_again=True):

        # checkstatus discards URL parameters either way, so strip them (and the
        # az:// scheme) before appending the SAS
        ud.url = ud.url.replace('az://', 'https://').split(';')[0]

        az_sas = d.getVar('AZ_SAS')
        if az_sas and az_sas not in ud.url:
            ud.url += az_sas

        return Wget.checkstatus(self, fetch, ud, d, try_again)

    # Override the download method to include retries
    def download(self, ud, d, retries=3):
        """Fetch urls"""

        # If we are hitting the account's transaction limit we may be refused a
        # connection; retrying lets us avoid false negatives since the limit changes over time
        fetchcmd = self.basecmd + ' --retry-connrefused --waitretry=5'

        # We need to provide a localpath to avoid wget using the SAS
        # ud.localfile either has the downloadfilename or ud.path
        localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile)
        bb.utils.mkdirhier(os.path.dirname(localpath))
        fetchcmd += " -O %s" % shlex.quote(localpath)

        if ud.user and ud.pswd:
            fetchcmd += " --user=%s --password=%s --auth-no-challenge" % (ud.user, ud.pswd)

        # Check if a Shared Access Signature was given and use it
        az_sas = d.getVar('AZ_SAS')

        if az_sas:
            azuri = 'https://%s%s%s' % (ud.host, ud.path, az_sas)
        else:
            azuri = 'https://%s%s' % (ud.host, ud.path)

        if os.path.exists(ud.localpath):
            # The file exists but we didn't complete it, resume and try again
            fetchcmd += d.expand(" -c -P ${DL_DIR} '%s'" % azuri)
        else:
            fetchcmd += d.expand(" -P ${DL_DIR} '%s'" % azuri)
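
        # At this point fetchcmd looks roughly like the following (illustrative only;
        # the base command comes from the inherited Wget fetcher's self.basecmd and
        # the path/URL pieces are placeholders):
        #   <wget basecmd> --retry-connrefused --waitretry=5 -O <DL_DIR>/<localfile> [-c] -P <DL_DIR> 'https://<host><path><sas>'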

        try:
            self._runwget(ud, d, fetchcmd, False)
        except FetchError as e:
            # Azure sometimes fails the handshake when wget is used under load, which
            # surfaces as a FetchError; if the artifact exists, retrying should succeed
            if retries and 'Unable to establish SSL connection' in str(e):
                logger.debug2('Unable to establish SSL connection: Retries remaining: %s, Retrying...' % retries)
                self.download(ud, d, retries - 1)
            else:
                raise

        # Sanity check since wget can pretend it succeeded when it didn't
        # Also, this used to happen if sourceforge sent us to the mirror page
        if not os.path.exists(ud.localpath):
            raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (azuri, ud.localpath), azuri)

        if os.path.getsize(ud.localpath) == 0:
            os.remove(ud.localpath)
            raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (azuri), azuri)

        return True