from contextlib import contextmanager

import os   # used throughout for path handling and file cleanup
import bb   # BitBake utilities (bb.note, bb.data, bb.utils, bb.error)
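# Distro check helpers: download the source package lists of several Linux
# distributions (Debian, Ubuntu, Fedora, openSUSE, Mandriva, MeeGo), cache
# them under a "package_lists" directory, and compare a recipe name against
# them to report which distributions carry a matching package.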
@contextmanager
def create_socket(url, d):
    import urllib
    socket = urllib.urlopen(url, proxies=get_proxies(d))
    try:
        yield socket
    finally:
        socket.close()

def get_proxies(d):
    proxies = {}
    for key in ['http', 'https', 'ftp', 'ftps', 'no', 'all']:
        proxy = d.getVar(key + '_proxy', True)
        if proxy:
            proxies[key] = proxy
    return proxies

def get_links_from_url(url, d):
    "Return all the href links found on the web location"

    import sgmllib

    class LinksParser(sgmllib.SGMLParser):
        def parse(self, s):
            "Parse the given string 's'."
            self.feed(s)
            self.close()

        def __init__(self, verbose=0):
            "Initialise an object passing 'verbose' to the superclass."
            sgmllib.SGMLParser.__init__(self, verbose)
            self.hyperlinks = []

        def start_a(self, attributes):
            "Process a hyperlink and its 'attributes'."
            for name, value in attributes:
                if name == "href":
                    self.hyperlinks.append(value.strip('/'))

        def get_hyperlinks(self):
            "Return the list of hyperlinks."
            return self.hyperlinks

    with create_socket(url, d) as sock:
        webpage = sock.read()

    linksparser = LinksParser()
    linksparser.parse(webpage)
    return linksparser.get_hyperlinks()

def find_latest_numeric_release(url, d):
    "Find the latest listed numeric release on the given url"
    maxrelease = 0
    maxstr = ""
    for link in get_links_from_url(url, d):
        try:
            release = float(link)
        except ValueError:
            release = 0
        if release > maxrelease:
            maxrelease = release
            maxstr = link
    return maxstr

def is_src_rpm(name):
    "Check if the link is pointing to a src.rpm file"
    return name.endswith(".src.rpm")

def package_name_from_srpm(srpm):
    "Strip out the package name from the src.rpm filename"
    strings = srpm.split('-')
    package_name = strings[0]
    for i in range(1, len(strings) - 1):
        part = strings[i]
        if not part[0].isdigit():
            package_name += '-' + part
    return package_name

def clean_package_list(package_list):
    "Remove duplicate package entries and sort the list"
    return sorted(set(package_list))


def get_latest_released_meego_source_package_list(d):
    "Returns a list of the names of all packages in the latest MeeGo distro"

    package_names = []
    try:
        f = open("/tmp/Meego-1.1", "r")
        for line in f:
            package_names.append(line[:-1] + ":" + "main") # Also strip the '\n' at the end
        f.close()
    except IOError:
        pass
    package_list = clean_package_list(package_names)
    return "1.0", package_list

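# Each entry in the package lists built below has the form
# "packagename:section" (for example "gzip:main"); the ":section" suffix is
# split off again when the lists are read back in
# compare_in_distro_packages_list().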
def get_source_package_list_from_url(url, section, d):
    "Return a sectioned list of package names from a URL list"

    bb.note("Reading %s: %s" % (url, section))
    links = get_links_from_url(url, d)
    srpms = filter(is_src_rpm, links)
    names_list = map(package_name_from_srpm, srpms)

    new_pkgs = []
    for pkgs in names_list:
        new_pkgs.append(pkgs + ":" + section)

    return new_pkgs

def get_latest_released_fedora_source_package_list(d):
    "Returns a list of the names of all packages in the latest Fedora distro"
    latest = find_latest_numeric_release("http://archive.fedoraproject.org/pub/fedora/linux/releases/", d)

    package_names = get_source_package_list_from_url("http://archive.fedoraproject.org/pub/fedora/linux/releases/%s/Fedora/source/SRPMS/" % latest, "main", d)

#    package_names += get_source_package_list_from_url("http://download.fedora.redhat.com/pub/fedora/linux/releases/%s/Everything/source/SPRMS/" % latest, "everything")
    package_names += get_source_package_list_from_url("http://archive.fedoraproject.org/pub/fedora/linux/updates/%s/SRPMS/" % latest, "updates", d)

    package_list = clean_package_list(package_names)

    return latest, package_list

def get_latest_released_opensuse_source_package_list(d):
    "Returns a list of the names of all packages in the latest openSUSE distro"
    latest = find_latest_numeric_release("http://download.opensuse.org/source/distribution/", d)

    package_names = get_source_package_list_from_url("http://download.opensuse.org/source/distribution/%s/repo/oss/suse/src/" % latest, "main", d)
    package_names += get_source_package_list_from_url("http://download.opensuse.org/update/%s/rpm/src/" % latest, "updates", d)

    package_list = clean_package_list(package_names)
    return latest, package_list

def get_latest_released_mandriva_source_package_list(d):
    "Returns a list of the names of all packages in the latest Mandriva distro"
    latest = find_latest_numeric_release("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/", d)
    package_names = get_source_package_list_from_url("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/%s/SRPMS/main/release/" % latest, "main", d)
#    package_names += get_source_package_list_from_url("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/%s/SRPMS/contrib/release/" % latest, "contrib")
    package_names += get_source_package_list_from_url("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/%s/SRPMS/main/updates/" % latest, "updates", d)

    package_list = clean_package_list(package_names)
    return latest, package_list

def find_latest_debian_release(url, d):
    "Find the latest listed Debian release on the given url"

    releases = []
    for link in get_links_from_url(url, d):
        if link[:6] == "Debian":
            if ';' not in link:
                releases.append(link)
    releases.sort()
    try:
        return releases.pop()[6:]
    except IndexError:
        return "_NotFound_"

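# Debian-style repositories publish a gzip-compressed Sources index whose
# stanzas contain "Package: <name>" lines; the helper below downloads it to a
# temporary file and extracts those names.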
def get_debian_style_source_package_list(url, section, d):
    "Return the list of package-names stored in the debian style Sources.gz file"
    with create_socket(url, d) as sock:
        import tempfile
        # download the compressed Sources index into a temporary file
        tmpfile = tempfile.NamedTemporaryFile(mode='wb', prefix='oecore.', suffix='.tmp', delete=False)
        tmpfilename = tmpfile.name
        tmpfile.write(sock.read())
        tmpfile.close()
    import gzip
    bb.note("Reading %s: %s" % (url, section))

    f = gzip.open(tmpfilename)
    package_names = []
    for line in f:
        if line[:9] == "Package: ":
            package_names.append(line[9:-1] + ":" + section) # Also strip the '\n' at the end
    f.close()
    os.unlink(tmpfilename)

    return package_names

def get_latest_released_debian_source_package_list(d):
    "Returns a list of the names of all packages in the latest Debian distro"
    latest = find_latest_debian_release("http://ftp.debian.org/debian/dists/", d)
    url = "http://ftp.debian.org/debian/dists/stable/main/source/Sources.gz"
    package_names = get_debian_style_source_package_list(url, "main", d)
#    url = "http://ftp.debian.org/debian/dists/stable/contrib/source/Sources.gz"
#    package_names += get_debian_style_source_package_list(url, "contrib")
    url = "http://ftp.debian.org/debian/dists/stable-proposed-updates/main/source/Sources.gz"
    package_names += get_debian_style_source_package_list(url, "updates", d)
    package_list = clean_package_list(package_names)
    return latest, package_list

def find_latest_ubuntu_release(url, d):
    "Find the latest listed Ubuntu release on the given url"
    url += "?C=M;O=D" # Descending Sort by Last Modified
    for link in get_links_from_url(url, d):
        if link[-8:] == "-updates":
            return link[:-8]
    return "_NotFound_"

def get_latest_released_ubuntu_source_package_list(d):
    "Returns a list of the names of all packages in the latest Ubuntu distro"
    latest = find_latest_ubuntu_release("http://archive.ubuntu.com/ubuntu/dists/", d)
    url = "http://archive.ubuntu.com/ubuntu/dists/%s/main/source/Sources.gz" % latest
    package_names = get_debian_style_source_package_list(url, "main", d)
#    url = "http://archive.ubuntu.com/ubuntu/dists/%s/multiverse/source/Sources.gz" % latest
#    package_names += get_debian_style_source_package_list(url, "multiverse")
#    url = "http://archive.ubuntu.com/ubuntu/dists/%s/universe/source/Sources.gz" % latest
#    package_names += get_debian_style_source_package_list(url, "universe")
    url = "http://archive.ubuntu.com/ubuntu/dists/%s-updates/main/source/Sources.gz" % latest
    package_names += get_debian_style_source_package_list(url, "updates", d)
    package_list = clean_package_list(package_names)
    return latest, package_list

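# create_distro_packages_list() writes one file per distribution into
# <distro_check_dir>/package_lists, named "<Distro>-<release>", each holding
# the "packagename:section" entries collected above.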
def create_distro_packages_list(distro_check_dir, d):
    pkglst_dir = os.path.join(distro_check_dir, "package_lists")
    if not os.path.isdir(pkglst_dir):
        os.makedirs(pkglst_dir)
    # first clear old stuff
    for pkgfile in os.listdir(pkglst_dir):
        os.unlink(os.path.join(pkglst_dir, pkgfile))

    per_distro_functions = [
        ["Debian", get_latest_released_debian_source_package_list],
        ["Ubuntu", get_latest_released_ubuntu_source_package_list],
        ["Fedora", get_latest_released_fedora_source_package_list],
        ["OpenSuSE", get_latest_released_opensuse_source_package_list],
        ["Mandriva", get_latest_released_mandriva_source_package_list],
        ["Meego", get_latest_released_meego_source_package_list]
    ]

    from datetime import datetime
    begin = datetime.now()
    for distro in per_distro_functions:
        name = distro[0]
        release, package_list = distro[1](d)
        bb.note("Distro: %s, Latest Release: %s, # src packages: %d" % (name, release, len(package_list)))
        package_list_file = os.path.join(pkglst_dir, name + "-" + release)
        f = open(package_list_file, "w+b")
        for pkg in package_list:
            f.write(pkg + "\n")
        f.close()
    end = datetime.now()
    delta = end - begin
    bb.note("package_list generation took %d seconds" % delta.seconds)

def update_distro_data(distro_check_dir, datetime, d):
    """
    If the distro packages list data is old then rebuild it.
    The operation has to be protected by a lock so that
    only one thread performs it at a time.
    """
    if not os.path.isdir(distro_check_dir):
        try:
            bb.note("Making new directory: %s" % distro_check_dir)
            os.makedirs(distro_check_dir)
        except OSError:
            raise Exception('Unable to create directory %s' % (distro_check_dir))


    datetime_file = os.path.join(distro_check_dir, "build_datetime")
    saved_datetime = "_invalid_"
    import fcntl
    f = None
    try:
        if not os.path.exists(datetime_file):
            open(datetime_file, 'w+b').close() # touch the file so that the next open won't fail

        f = open(datetime_file, "r+b")
        fcntl.lockf(f, fcntl.LOCK_EX)
        saved_datetime = f.read()
        if saved_datetime[0:8] != datetime[0:8]:
            bb.note("The build datetime did not match: saved:%s current:%s" % (saved_datetime, datetime))
            bb.note("Regenerating distro package lists")
            create_distro_packages_list(distro_check_dir, d)
            f.seek(0)
            f.write(datetime)

    except OSError:
        raise Exception('Unable to read/write this file: %s' % (datetime_file))
    finally:
        if f:
            fcntl.lockf(f, fcntl.LOCK_UN)
            f.close()

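# DISTRO_PN_ALIAS is a space-separated list mixing "Distro=packagename"
# mappings with bare markers such as "OE-Core" or "OSPDT"; the bare markers
# are matched against distro_exceptions below, while the "=" entries rename
# the recipe for a particular distribution's package list.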
def compare_in_distro_packages_list(distro_check_dir, d):
    if not os.path.isdir(distro_check_dir):
        raise Exception("compare_in_distro_packages_list: invalid distro_check_dir passed")

    localdata = bb.data.createCopy(d)
    pkglst_dir = os.path.join(distro_check_dir, "package_lists")
    matching_distros = []
    pn = d.getVar('PN', True)
    recipe_name = d.getVar('PN', True)
    bb.note("Checking: %s" % pn)

    trim_dict = dict({"-native":"-native", "-cross":"-cross", "-initial":"-initial"})

    if pn.find("-native") != -1:
        pnstripped = pn.split("-native")
        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
        bb.data.update_data(localdata)
        recipe_name = pnstripped[0]

    if pn.startswith("nativesdk-"):
        pnstripped = pn.split("nativesdk-")
        localdata.setVar('OVERRIDES', "pn-" + pnstripped[1] + ":" + d.getVar('OVERRIDES', True))
        bb.data.update_data(localdata)
        recipe_name = pnstripped[1]

    if pn.find("-cross") != -1:
        pnstripped = pn.split("-cross")
        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
        bb.data.update_data(localdata)
        recipe_name = pnstripped[0]

    if pn.find("-initial") != -1:
        pnstripped = pn.split("-initial")
        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
        bb.data.update_data(localdata)
        recipe_name = pnstripped[0]

    bb.note("Recipe: %s" % recipe_name)
    tmp = localdata.getVar('DISTRO_PN_ALIAS', True)

    distro_exceptions = dict({"OE-Core":'OE-Core', "OpenedHand":'OpenedHand', "Intel":'Intel', "Upstream":'Upstream', "Windriver":'Windriver', "OSPDT":'OSPDT Approved', "Poky":'poky'})

    if tmp:
        aliases = tmp.split(' ')
        for alias in aliases:
            # bare tokens (no "=") are accepted only if they are known exception markers
            if alias and alias.find("=") == -1 and alias in distro_exceptions:
                matching_distros.append(alias)

    distro_pn_aliases = {}
    if tmp:
        aliases = tmp.split(' ')
        for alias in aliases:
            if alias.find("=") != -1:
                (dist, pn_alias) = alias.split('=')
                distro_pn_aliases[dist.strip().lower()] = pn_alias.strip()

    for pkglst_file in os.listdir(pkglst_dir):
        (distro, distro_release) = pkglst_file.split("-")
        f = open(os.path.join(pkglst_dir, pkglst_file), "rb")
        for line in f:
            (pkg, section) = line.split(":")
            if distro.lower() in distro_pn_aliases:
                pn = distro_pn_aliases[distro.lower()]
            else:
                pn = recipe_name
            if pn == pkg:
                matching_distros.append(distro + "-" + section[:-1]) # strip the \n at the end
                break
        f.close()


    if tmp is not None:
        for item in tmp.split(' '):
            matching_distros.append(item)
    bb.note("Matching: %s" % matching_distros)
    return matching_distros

def create_log_file(d, logname):
    import subprocess
    logpath = d.getVar('LOG_DIR', True)
    bb.utils.mkdirhier(logpath)
    logfn, logsuffix = os.path.splitext(logname)
    logfile = os.path.join(logpath, "%s.%s%s" % (logfn, d.getVar('DATETIME', True), logsuffix))
    if not os.path.exists(logfile):
        slogfile = os.path.join(logpath, logname)
        if os.path.exists(slogfile):
            os.remove(slogfile)
        subprocess.call("touch %s" % logfile, shell=True)
        os.symlink(logfile, slogfile)
        d.setVar('LOG_FILE', logfile)
    return logfile


def save_distro_check_result(result, datetime, result_file, d):
    pn = d.getVar('PN', True)
    logdir = d.getVar('LOG_DIR', True)
    if not logdir:
        bb.error("LOG_DIR variable is not defined, can't write the distro_check results")
        return
    if not os.path.isdir(logdir):
        os.makedirs(logdir)
    line = pn
    for i in result:
        line = line + "," + i
    f = open(result_file, "a")
    import fcntl
    fcntl.lockf(f, fcntl.LOCK_EX)
    f.seek(0, os.SEEK_END) # seek to the end of file
    f.write(line + "\n")
    fcntl.lockf(f, fcntl.LOCK_UN)
    f.close()
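
# Rough usage sketch (the caller is not part of this module, and the
# directory and file names below are only placeholders): a task would
# typically refresh the cached lists, run the comparison for the current
# recipe and append the result, e.g.
#
#   distro_check_dir = os.path.join(d.getVar('LOG_DIR', True), "distro_check")
#   datetime = d.getVar('DATETIME', True)
#   update_distro_data(distro_check_dir, datetime, d)
#   matches = compare_in_distro_packages_list(distro_check_dir, d)
#   result_file = os.path.join(d.getVar('LOG_DIR', True), "distrocheck.csv")
#   save_distro_check_result(matches, datetime, result_file, d)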