"""
BitBake 'Fetch' implementation for Amazon AWS S3.

Class for fetching files from Amazon S3 using the AWS Command Line Interface.
The aws tool must be correctly installed and configured prior to use.

"""

# Copyright (C) 2017, Andre McCurdy <armccurdy@gmail.com>
#
# Based in part on bb.fetch2.wget:
# Copyright (C) 2003, 2004 Chris Larson
#
# SPDX-License-Identifier: GPL-2.0-only
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig

import os
import bb
import urllib.parse
from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import runfetchcmd

class S3(FetchMethod):
    """Class to fetch urls via 'aws s3'"""

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with s3.
        """
        return ud.type in ['s3']

    def recommends_checksum(self, urldata):
        return True

    def urldata_init(self, ud, d):
        if 'downloadfilename' in ud.parm:
            ud.basename = ud.parm['downloadfilename']
        else:
            ud.basename = os.path.basename(ud.path)

        ud.localfile = d.expand(urllib.parse.unquote(ud.basename))

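        # FETCHCMD_s3 can be set in a recipe or configuration file to override
        # the default aws command below, e.g. (hypothetical):
        #   FETCHCMD_s3 = "/usr/bin/env aws --profile mirror s3"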
        ud.basecmd = d.getVar("FETCHCMD_s3") or "/usr/bin/env aws s3"

    def download(self, ud, d):
        """
        Fetch urls.
        Assumes localpath was called first.
        """

        cmd = '%s cp s3://%s%s %s' % (ud.basecmd, ud.host, ud.path, ud.localpath)
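        # check_network_access() raises an error if network access has been
        # disabled (e.g. BB_NO_NETWORK = "1"), before any command is run.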
        bb.fetch2.check_network_access(d, cmd, ud.url)
        runfetchcmd(cmd, d)

        # Additional sanity checks copied from the wget class. There are no
        # known issues which make them necessary, but treat the aws cli tool
        # with a little healthy suspicion.

        if not os.path.exists(ud.localpath):
            raise FetchError("The aws cp command returned success for s3://%s%s but %s doesn't exist?!" % (ud.host, ud.path, ud.localpath))

        if os.path.getsize(ud.localpath) == 0:
            os.remove(ud.localpath)
            raise FetchError("The aws cp command for s3://%s%s resulted in a zero size file?! Deleting and failing since this isn't right." % (ud.host, ud.path))

        return True

    def checkstatus(self, fetch, ud, d):
        """
        Check the status of a URL.
        """

        cmd = '%s ls s3://%s%s' % (ud.basecmd, ud.host, ud.path)
        bb.fetch2.check_network_access(d, cmd, ud.url)
        output = runfetchcmd(cmd, d)

| 79 | # "aws s3 ls s3://mybucket/foo" will exit with success even if the file |
| 80 | # is not found, so check output of the command to confirm success. |
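        # Example "aws s3 ls" output for an object which exists (illustrative;
        # the date, size and name are made up):
        #   2017-01-01 12:00:00       1024 example-1.0.tar.gz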

        if not output:
            raise FetchError("The aws ls command for s3://%s%s gave empty output" % (ud.host, ud.path))

        return True