#!/usr/bin/python

# This script generates the unit test coverage report for OpenBMC.
#
# Usage:
#   get_unit_test_report.py target_dir [url_file]
#
# Positional arguments:
#   target_dir  Target directory in pwd to place all cloned repos and logs.
#   url_file    Text file containing URLs of repositories. Optional.
#               By using this argument, the user can get a report only for
#               the specific repositories given in the file.
#               Refer to ./scripts/repositories.txt
#
# Examples:
#     get_unit_test_report.py target_dir
#     get_unit_test_report.py target_dir repositories.txt
#
# Output format:
#
# ***********************************OUTPUT***********************************
# https://github.com/openbmc/phosphor-dbus-monitor.git              NO
# https://github.com/openbmc/phosphor-sel-logger.git;protocol=git   NO
# ***********************************OUTPUT***********************************
#
# Other outputs and errors are redirected to output.log and debug.log in
# target_dir.
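#
# Example url_file content (illustrative; one repository URL per line, blank
# lines are ignored; see ./scripts/repositories.txt for reference):
#   https://github.com/openbmc/phosphor-dbus-monitor.git
#   https://github.com/openbmc/phosphor-sel-logger.git;protocol=git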

import argparse
import logging
import os
import re
import shutil
import subprocess

import requests

# Repos not expected to contain UT. Will be moved to a file in the future.
skip_list = [
    "openbmc-tools",
    "inarp",
    "openbmc",
    "openbmc.github.io",
    "phosphor-ecc",
    "phosphor-pcie-presence",
    "phosphor-u-boot-env-mgr",
    "rrd-ipmi-blob",
    "librrdplus",
    "openpower-inventory-upload",
    "openpower-logging",
    "openpower-power-control",
    "docs",
    "openbmc-test-automation",
    "openbmc-build-scripts",
    "skeleton",
    "linux",
    # Not active, expected to be archived soon.
    "ibm-pldm-oem",
]


# Create parser.
text = """%(prog)s target_dir [url_file]

Example usages:
get_unit_test_report.py target_dir
get_unit_test_report.py target_dir repositories.txt"""

parser = argparse.ArgumentParser(
    usage=text, description="Generates the unit test coverage report"
)
parser.add_argument(
    "target_dir",
    type=str,
    help="""Name of a non-existing directory in pwd to store all
            cloned repos, logs and UT reports""",
)
parser.add_argument(
    "url_file",
    type=str,
    nargs="?",
    help="""Text file containing URLs of repositories.
            By using this argument, the user can get a report only for
            the specific repositories given in the file.
            Refer to ./scripts/repositories.txt""",
)
args = parser.parse_args()

input_urls = []
if args.url_file:
    try:
        # Get URLs from the file.
        with open(args.url_file) as reader:
            file_content = reader.read().splitlines()
            input_urls = list(filter(lambda x: x, file_content))
        if not input_urls:
            print("Input file {} is empty. Quitting...".format(args.url_file))
            quit()
    except IOError as e:
        print(
            "Issue in reading file '{}'. Reason: {}".format(
                args.url_file, str(e)
            )
        )
        quit()


# Create target working directory.
pwd = os.getcwd()
working_dir = os.path.join(pwd, args.target_dir)
try:
    os.mkdir(working_dir)
except OSError:
    answer = input(
        "Target directory "
        + working_dir
        + " already exists. "
        + "Do you want to delete [Y/N]: "
    )
    if answer == "Y":
        try:
            shutil.rmtree(working_dir)
            os.mkdir(working_dir)
        except OSError as e:
            print(str(e))
            quit()
    else:
        print("Exiting....")
        quit()

# Create log directory.
log_dir = os.path.join(working_dir, "logs")
try:
    os.mkdir(log_dir)
except OSError as e:
    print("Unable to create log directory: " + log_dir)
    print(str(e))
    quit()


# Log files
debug_file = os.path.join(log_dir, "debug.log")
output_file = os.path.join(log_dir, "output.log")
logging.basicConfig(
    format="%(levelname)s - %(message)s",
    level=logging.DEBUG,
    filename=debug_file,
)
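# The root logger above captures everything at DEBUG level in debug.log;
# the named logger below adds INFO-level handlers so the summary also goes
# to the console and to output.log.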
logger = logging.getLogger(__name__)

# Create handlers
console_handler = logging.StreamHandler()
file_handler = logging.FileHandler(output_file)
console_handler.setLevel(logging.INFO)
file_handler.setLevel(logging.INFO)

# Create formatters and add them to the handlers
log_format = logging.Formatter("%(message)s")
console_handler.setFormatter(log_format)
file_handler.setFormatter(log_format)

# Add handlers to the logger
logger.addHandler(console_handler)
logger.addHandler(file_handler)


# Create report directory.
report_dir = os.path.join(working_dir, "reports")
try:
    os.mkdir(report_dir)
except OSError as e:
    logger.error("Unable to create report directory: " + report_dir)
    logger.error(str(e))
    quit()

# Clone OpenBMC build scripts.
try:
    output = subprocess.check_output(
        "git clone https://github.com/openbmc/openbmc-build-scripts.git",
        shell=True,
        cwd=working_dir,
        stderr=subprocess.STDOUT,
    )
    logger.debug(output)
except subprocess.CalledProcessError as e:
    logger.error(e.output)
    logger.error(e.cmd)
    logger.error("Unable to clone openbmc-build-scripts")
    quit()

repo_data = []
if input_urls:
    api_url = "https://api.github.com/repos/openbmc/"
    for url in input_urls:
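        # Eg: url = "https://github.com/openbmc/phosphor-sel-logger.git;protocol=git"
        # repo_name = "phosphor-sel-logger"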
        try:
            repo_name = url.strip().split("/")[-1].split(";")[0].split(".")[0]
        except IndexError as e:
            logger.error("ERROR: Unable to get sandbox name for url " + url)
            logger.error("Reason: " + str(e))
            continue

        try:
            resp = requests.get(api_url + repo_name)
            if resp.status_code != 200:
                logger.info(api_url + repo_name + " ==> " + resp.reason)
                continue
            repo_data.extend([resp.json()])
        except ValueError:
            logger.error("ERROR: Failed to get response for " + repo_name)
            logger.error(resp)
            continue

else:
    # Get number of pages.
    resp = requests.head("https://api.github.com/users/openbmc/repos")
    if resp.status_code != 200:
        logger.error("Error! Unable to get repositories")
        logger.error(resp.status_code)
        logger.error(resp.reason)
        quit()
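    # GitHub paginates this listing; requests exposes the Link response
    # header through resp.links, and a "last" entry is assumed to be
    # present, e.g. (illustrative value):
    #   resp.links["last"]["url"] == ".../users/openbmc/repos?page=5"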
    num_of_pages = int(resp.links["last"]["url"].split("page=")[-1])
    logger.debug("No. of pages: " + str(num_of_pages))

    # Fetch data from all pages.
    for page in range(1, num_of_pages + 1):
        resp = requests.get(
            "https://api.github.com/users/openbmc/repos?page=" + str(page)
        )
        data = resp.json()
        repo_data.extend(data)


# Get URLs and their archive status from response.
url_info = {}
for repo in repo_data:
    try:
        url_info[repo["clone_url"]] = repo["archived"]
    except KeyError:
        logger.error("Failed to get archived status of {}".format(repo))
        url_info[repo["clone_url"]] = False
        continue
logger.debug(url_info)
repo_count = len(url_info)
logger.info("Number of repositories (Including archived): " + str(repo_count))

# Clone repository and run unit test.
coverage_report = []
counter = 0
tested_report_count = 0
coverage_count = 0
unit_test_count = 0
no_report_count = 0
error_count = 0
skip_count = 0
archive_count = 0
url_list = sorted(url_info)
for url in url_list:
    ut_status = "NO"
    skip = False
    if url_info[url]:
        ut_status = "ARCHIVED"
        skip = True
    else:
        try:
            # Eg: url = "https://github.com/openbmc/u-boot.git"
            # sandbox_name = "u-boot"
            sandbox_name = (
                url.strip().split("/")[-1].split(";")[0].split(".")[0]
            )
        except IndexError as e:
            logger.error("ERROR: Unable to get sandbox name for url " + url)
            logger.error("Reason: " + str(e))
            continue

        if sandbox_name in skip_list or re.match(r"meta-", sandbox_name):
            logger.debug("SKIPPING: " + sandbox_name)
            skip = True
            ut_status = "SKIPPED"
        else:
            checkout_cmd = "rm -rf " + sandbox_name + ";git clone " + url
            try:
                subprocess.check_output(
                    checkout_cmd,
                    shell=True,
                    cwd=working_dir,
                    stderr=subprocess.STDOUT,
                )
            except subprocess.CalledProcessError as e:
                logger.debug(e.output)
                logger.debug(e.cmd)
                logger.debug("Failed to clone " + sandbox_name)
                ut_status = "ERROR"
                skip = True
    if not skip:
        docker_cmd = (
            "WORKSPACE=$(pwd) UNIT_TEST_PKG="
            + sandbox_name
            + " "
            + "./openbmc-build-scripts/run-unit-test-docker.sh"
        )
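        # The assembled command looks like, e.g. (illustrative repo name):
        #   WORKSPACE=$(pwd) UNIT_TEST_PKG=u-boot \
        #       ./openbmc-build-scripts/run-unit-test-docker.sh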
        try:
            result = subprocess.check_output(
                docker_cmd,
                cwd=working_dir,
                shell=True,
                stderr=subprocess.STDOUT,
            )
            logger.debug(result)
            logger.debug("UT BUILD COMPLETED FOR: " + sandbox_name)

        except subprocess.CalledProcessError as e:
            logger.debug(e.output)
            logger.debug(e.cmd)
            logger.debug("UT BUILD EXITED FOR: " + sandbox_name)
            ut_status = "ERROR"

        folder_name = os.path.join(working_dir, sandbox_name)
        repo_report_dir = os.path.join(report_dir, sandbox_name)

        report_names = ("coveragereport", "test-suite.log", "LastTest.log")
        find_cmd = "".join(
            "find " + folder_name + " -name " + report + ";"
            for report in report_names
        )
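        # find_cmd chains one "find" per report name, e.g. (illustrative):
        #   find <folder_name> -name coveragereport;
        #   find <folder_name> -name test-suite.log; ...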
        try:
            result = subprocess.check_output(find_cmd, shell=True)
            result = result.decode("utf-8")
        except subprocess.CalledProcessError as e:
            logger.debug(e.output)
            logger.debug(e.cmd)
            logger.debug("CMD TO FIND REPORT FAILED FOR: " + sandbox_name)
            ut_status = "ERROR"

        if ut_status != "ERROR":
            if result:
                if "coveragereport" in result:
                    ut_status = "YES, COVERAGE"
                    coverage_count += 1
                elif "test-suite.log" in result:
                    ut_status = "YES, UNIT TEST"
                    unit_test_count += 1
                elif "LastTest.log" in result:
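                    # LastTest.log comes from CTest; a run with more than 5
                    # lines between the "Start testing"/"End testing" markers
                    # is taken to mean at least one test case actually ran.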
                    file_names = result.splitlines()
                    for file in file_names:
                        pattern_count_cmd = (
                            "sed -n '/Start testing/,/End testing/p;' "
                            + file
                            + "|wc -l"
                        )
                        try:
                            num_of_lines = subprocess.check_output(
                                pattern_count_cmd, shell=True
                            )
                        except subprocess.CalledProcessError as e:
                            logger.debug(e.output)
                            logger.debug(e.cmd)
                            logger.debug(
                                "CONTENT CHECK FAILED FOR: " + sandbox_name
                            )
                            ut_status = "ERROR"
                            # No line count available for this file.
                            continue

                        if int(num_of_lines.strip()) > 5:
                            ut_status = "YES, UNIT TEST"
                            unit_test_count += 1
                            # Count each repository only once.
                            break

        if "YES" in ut_status:
            tested_report_count += 1
            result = result.splitlines()
            for file_path in result:
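                # Mirror each report's path under report_dir, e.g.
                # <working_dir>/u-boot/.../coveragereport is copied to
                # <report_dir>/u-boot/... (illustrative paths).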
                destination = os.path.dirname(
                    os.path.join(
                        report_dir, os.path.relpath(file_path, working_dir)
                    )
                )
                copy_cmd = (
                    "mkdir -p "
                    + destination
                    + ";cp -rf "
                    + file_path.strip()
                    + " "
                    + destination
                )
                try:
                    subprocess.check_output(copy_cmd, shell=True)
                except subprocess.CalledProcessError as e:
                    logger.debug(e.output)
                    logger.debug(e.cmd)
                    logger.info("FAILED TO COPY REPORTS FOR: " + sandbox_name)

    if ut_status == "ERROR":
        error_count += 1
    elif ut_status == "NO":
        no_report_count += 1
    elif ut_status == "SKIPPED":
        skip_count += 1
    elif ut_status == "ARCHIVED":
        archive_count += 1

    coverage_report.append("{:<65}{:<10}".format(url.strip(), ut_status))
    counter += 1
    logger.info(str(counter) + " in " + str(repo_count) + " completed")

logger.info("*" * 30 + "UNIT TEST COVERAGE REPORT" + "*" * 30)
for res in coverage_report:
    logger.info(res)
logger.info("*" * 30 + "UNIT TEST COVERAGE REPORT" + "*" * 30)

logger.info("REPORTS: " + report_dir)
logger.info("LOGS: " + log_dir)
logger.info("*" * 85)
logger.info("SUMMARY: ")
logger.info("TOTAL REPOSITORIES : " + str(repo_count))
logger.info("TESTED REPOSITORIES : " + str(tested_report_count))
logger.info("ERROR : " + str(error_count))
logger.info("COVERAGE REPORT : " + str(coverage_count))
logger.info("UNIT TEST REPORT : " + str(unit_test_count))
logger.info("NO REPORT : " + str(no_report_count))
logger.info("SKIPPED : " + str(skip_count))
logger.info("ARCHIVED : " + str(archive_count))
logger.info("*" * 85)