Alex | 0bcf31b | 2022-03-29 17:38:58 -0500 | [diff] [blame] | 1 | # Author: Alex Savatieiev (osavatieiev@mirantis.com; a.savex@gmail.com) |
| 2 | # Copyright 2019-2022 Mirantis, Inc. |
| 3 | import os |
| 4 | |
| 5 | from cfg_checker.common import logger_cli |
| 6 | from cfg_checker.common.settings import ENV_TYPE_KUBE |
| 7 | from cfg_checker.helpers import args_utils |
| 8 | from cfg_checker.modules.logs import sage |
| 9 | |
# One-line description of this module shown in the top-level CLI help
command_help = "Logs collecting and organizing"
# Environments this module supports; log collection is Kubernetes-only
supported_envs = [ENV_TYPE_KUBE]
| 12 | |
| 13 | |
def init_parser(_parser):
    """Register the 'logs' subcommands and their options on *_parser*.

    Adds a 'collect' subcommand with namespace/pod filtering options.

    :param _parser: argparse (sub)parser to attach subcommands to
    :return: the same parser, for chaining
    """
    # logs subparser (dest='type' selects which subcommand was used)
    logs_subparsers = _parser.add_subparsers(dest='type')

    collect_parser = logs_subparsers.add_parser(
        'collect',
        help="Collect logs according to filters and/or given criteria"
    )

    collect_parser.add_argument(
        '--ns',
        metavar='namespace',
        action="append",
        help="Namespace to get pods from. Can be used multiple times"
    )

    collect_parser.add_argument(
        '--pod-mask',
        metavar='pod_mask',
        action="append",
        help="Mask/Keyword to filter pods. Can be used multiple times"
    )

    # NOTE(review): store_true with default=True means this option can
    # never evaluate to False from the command line — presumably the
    # default was meant to be False; confirm intent before changing.
    collect_parser.add_argument(
        '--pods-inclusive',
        action="store_true", default=True,
        help="Inclusive pod mask filtering, "
             "i.e. OR for filters for 'True' or AND for 'False'"
    )

    collect_parser.add_argument(
        '--file',
        metavar='logs_filename',
        help="Filename for logs to be saved to"
    )

    collect_parser.add_argument(
        '--exclude',
        metavar='exclude_mask',
        action="append",
        help="Mask/Keyword to exclude pods from final results. "
             "Can be used multiple times"
    )

    collect_parser.add_argument(
        '--dump-undetected',
        metavar="dumppath", default="null",
        help="Give dump path to store non-parsable log lines separately. "
             "Default: null"
    )

    # type=int so CLI-supplied values match the int default (was a latent
    # str/int mismatch for downstream consumers of 'tail')
    collect_parser.add_argument(
        '--tail',
        metavar='tail', default=50, type=int,
        help="Number of lines to capture. Default: 50"
    )

    return _parser
| 72 | |
| 73 | |
def do_collect(args, config):
    """Collect, parse, merge and save pod logs from a Kube environment.

    Validates the environment, resolves CLI filter options into the
    shared *config*, then drives the KubeMosLogger pipeline:
    prepare pod list -> collect -> parse -> merge -> save.

    :param args: parsed argparse namespace from the 'collect' subcommand
    :param config: shared checker configuration object (mutated in place)
    :return: None; exits early on an invalid dump path
    """
    # Log collection is only implemented for Kubernetes environments
    args_utils.check_supported_env(ENV_TYPE_KUBE, args, config)

    # Resolve the output filename, falling back to the default
    _logsfile = args.file if args.file else "mos_logs.log"
    logger_cli.info("# Output file is '{}'".format(_logsfile))

    config.prepare_qa_resources = False
    # Optional path where log lines matching no known regex get dumped;
    # the literal string "null" disables the feature
    config.dumppath = args_utils.get_arg(args, "dump_undetected")
    if config.dumppath != "null" and not os.path.exists(config.dumppath):
        logger_cli.error(
            "ERROR: Path to dump not parsable logs not found: '{}'".format(
                config.dumppath
            )
        )
        return
    config.tail_lines = args_utils.get_arg(args, "tail")

    ml = sage.KubeMosLogger(config)

    # Gather pod filtering options from the CLI
    namespaces = args_utils.get_arg(args, "ns")
    pod_masks = args_utils.get_arg(args, "pod_mask")
    pods_inclusive = args_utils.get_arg(args, "pods_inclusive")
    exclude_keywords = args_utils.get_arg(args, "exclude") or []
    # 'cleaner' pods are always excluded from the results
    exclude_keywords += ["cleaner"]

    # Build the pod list matching the namespace/mask filters
    _plist = ml.prepare_pods(
        namespaces,
        pod_masks,
        inclusive_filter=pods_inclusive,
        exclude_kw=exclude_keywords
    )

    # Run the pipeline: fetch raw logs, parse them, merge by timestamp,
    # and write the merged result to the output file
    ml.collect_logs(_plist)
    ml.parse_logs()
    ml.merge_logs()
    ml.save_logs(_logsfile)