#!/usr/bin/env python
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2013 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import argparse
import gzip
import os
import re
import StringIO
import sys
import urllib2
import yaml


def process_files(file_specs, url_specs, whitelists):
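    """Scan local log files and remote gzipped logs for errors.

    file_specs and url_specs are lists of (log name, location) tuples;
    whitelists maps a log name to its list of whitelist entries.
    Returns True if any non-whitelisted error was found.
    """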
    regexp = re.compile(r"^.*(ERROR|CRITICAL).*\[.*\-.*\]")
    had_errors = False
    for (name, filename) in file_specs:
        whitelist = whitelists.get(name, [])
        with open(filename) as content:
            if scan_content(name, content, regexp, whitelist):
                had_errors = True
    for (name, url) in url_specs:
        whitelist = whitelists.get(name, [])
        req = urllib2.Request(url)
        req.add_header('Accept-Encoding', 'gzip')
        page = urllib2.urlopen(req)
        buf = StringIO.StringIO(page.read())
        f = gzip.GzipFile(fileobj=buf)
        if scan_content(name, f.read().splitlines(), regexp, whitelist):
            had_errors = True
    return had_errors


def scan_content(name, content, regexp, whitelist):
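    """Print every line in content that matches regexp but no whitelist entry.

    Lines starting with "Stderr:" are skipped.  Returns True if at least
    one non-whitelisted error line was found.
    """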
    had_errors = False
    for line in content:
        if not line.startswith("Stderr:") and regexp.match(line):
            whitelisted = False
            for w in whitelist:
                pat = ".*%s.*%s.*" % (w['module'].replace('.', '\\.'),
                                      w['message'])
                if re.match(pat, line):
                    whitelisted = True
                    break
            if not whitelisted:
                if not had_errors:
                    print("Log File: %s" % name)
                    had_errors = True
                print(line)
    return had_errors


def collect_url_logs(url):
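    """Return the screen-*.txt.gz log file names linked from the given URL."""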
    page = urllib2.urlopen(url)
    content = page.read()
    logs = re.findall(r'(screen-[\w-]+\.txt\.gz)</a>', content)
    return logs


def main(opts):
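    """Collect logs from a directory or gate-job URL, load the whitelist,
    and scan the logs for non-whitelisted errors."""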
    if (opts.directory and opts.url) or not (opts.directory or opts.url):
        print("Must provide exactly one of -d or -u")
        sys.exit(1)
    print("Checking logs...")
    WHITELIST_FILE = os.path.join(
        os.path.abspath(os.path.dirname(os.path.dirname(__file__))),
        "etc", "whitelist.yaml")

    file_matcher = re.compile(r".*screen-([\w-]+)\.log")
    files = []
    if opts.directory:
        d = opts.directory
        for f in os.listdir(d):
            files.append(os.path.join(d, f))
    files_to_process = []
    for f in files:
        m = file_matcher.match(f)
        if m:
            files_to_process.append((m.group(1), f))

    url_matcher = re.compile(r".*screen-([\w-]+)\.txt\.gz")
    urls = []
    if opts.url:
        for logfile in collect_url_logs(opts.url):
            urls.append("%s/%s" % (opts.url, logfile))
    urls_to_process = []
    for u in urls:
        m = url_matcher.match(u)
        if m:
            urls_to_process.append((m.group(1), u))

    whitelists = {}
    with open(WHITELIST_FILE) as stream:
        loaded = yaml.safe_load(stream)
        if loaded:
            for (name, l) in loaded.iteritems():
                for w in l:
                    assert 'module' in w, 'no module in %s' % name
                    assert 'message' in w, 'no message in %s' % name
            whitelists = loaded
    if process_files(files_to_process, urls_to_process, whitelists):
        print("Logs have errors")
        # Return 0 for now; switch to non-zero to start failing builds.
        return 0
    else:
        print("ok")
        return 0

usage = """
Find non-whitelisted log errors in log files from a devstack-gate run.
Log files are searched for ERROR or CRITICAL messages. Any error message
that does not match a whitelist entry in etc/whitelist.yaml is printed to
the console and failure is reported. Either a directory containing log
files or the URL of the logs from an OpenStack gate job can be provided.

The whitelist yaml looks like:

log-name:
    - module: "a.b.c"
      message: "regexp"
    - module: "a.b.c"
      message: "regexp"

repeated for each log file with a whitelist.
"""
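
# A minimal sketch of typical invocations; the directory path and URL below
# are hypothetical examples, not values assumed by the script:
#
#   check_logs.py -d /opt/stack/logs
#   check_logs.py -u http://logs.example.org/<change>/<patchset>/<job>/logs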

parser = argparse.ArgumentParser(description=usage)
parser.add_argument('-d', '--directory',
                    help="Directory containing log files")
parser.add_argument('-u', '--url',
                    help="URL containing logs from an OpenStack gate job")

if __name__ == "__main__":
    try:
        sys.exit(main(parser.parse_args()))
    except Exception as e:
        print("Failure in script: %s" % e)
        # Don't fail if there is a problem with the script.
        sys.exit(0)