Log collector module

New:
  - [Done] multiple namespace selector
  - [Done] keyword-based pod selector
  - [Done] per-pod logs syntax detection and parsing
  - [Deferred] in-place filtering for shorter logs
  - [Done] individual logs timestamp detection
  - [Done] Unix-time-based timestamp sorting
  - [Done] Single file logs output using common format
  - [Done] add all log types from all MOS namespaces and pods

Update:
  - resource preparation can be skipped per module
  - updated log collection using multiple threads
  - new setting LOG_COLLECT_THREADS

Fixes:
  - Network MTU fix
  - Faster cmd execution on single pod
  - Ceph benchmark validations
  - Ceph benchmark report sorting
  - Daemonset deployment with nodes skipped
  - Network tree debugging script
  - Tree depth limiter, i.e. stackoverflow prevention

  Related-PROD: PROD-36845

Change-Id: Icf229ac62078c6418ab4dbdff12b0d27ed42af1d
diff --git a/cfg_checker/modules/logs/__init__.py b/cfg_checker/modules/logs/__init__.py
new file mode 100644
index 0000000..8003e54
--- /dev/null
+++ b/cfg_checker/modules/logs/__init__.py
@@ -0,0 +1,122 @@
+#    Author: Alex Savatieiev (osavatieiev@mirantis.com; a.savex@gmail.com)
+#    Copyright 2019-2022 Mirantis, Inc.
+import os
+
+from cfg_checker.common import logger_cli
+from cfg_checker.common.settings import ENV_TYPE_KUBE
+from cfg_checker.helpers import args_utils
+from cfg_checker.modules.logs import sage
+
+command_help = "Logs collecting and organizing"
+supported_envs = [ENV_TYPE_KUBE]
+
+
def init_parser(_parser):
    """Attach the 'logs' subcommands and their arguments to *_parser*.

    Registers a single 'collect' subcommand (stored in the 'type'
    attribute of the parsed namespace) with its namespace/pod filtering,
    output and parsing options.

    :param _parser: argparse parser (or subparser) to extend
    :return: the same parser, to allow chained configuration
    """
    # logs subparser; the chosen subcommand lands in args.type
    logs_subparsers = _parser.add_subparsers(dest='type')

    collect_parser = logs_subparsers.add_parser(
        'collect',
        help="Collect logs according to filters and/or given criteria"
    )

    collect_parser.add_argument(
        '--ns',
        metavar='namespace',
        action="append",
        help="Namespace to get pods from. Can be used multiple times"
    )

    collect_parser.add_argument(
        '--pod-mask',
        metavar='pod_mask',
        action="append",
        help="Mask/Keyword to filter pods. Can be used multiple times"
    )

    collect_parser.add_argument(
        '--pods-inclusive',
        # TODO(review): 'store_true' combined with default=True can never
        # yield False, so the AND filtering mode described in the help
        # text is unreachable from the CLI — confirm intended behavior
        action="store_true", default=True,
        help="Inclusive pod mask filtering, "
             "i.e. OR for filters for 'True' or AND for 'False'"
    )

    collect_parser.add_argument(
        '--file',
        metavar='logs_filename',
        help="Filename for logs to be saved to"
    )

    collect_parser.add_argument(
        '--exclude',
        metavar='exclude_mask',
        action="append",
        help="Mask/Keyword to exclude pods from final results. "
             "Can be used multiple times"
    )

    collect_parser.add_argument(
        '--dump-undetected',
        metavar="dumppath", default="null",
        help="Give dump path to store not parsed log lines separately. "
             "Default: null"
    )

    collect_parser.add_argument(
        '--tail',
        # type=int so CLI-supplied values match the integer default
        metavar='tail', default=50, type=int,
        help="Number of lines to capture. Default: 50"
    )

    return _parser
+
+
def do_collect(args, config):
    """Collect, parse, merge and save pod logs into a single file.

    Builds a pod list from the namespace/mask filters in *args*,
    gathers each selected pod's logs, parses and timestamp-merges
    them, and writes the result to the requested output file.

    :param args: parsed CLI namespace produced by init_parser
    :param config: mutable runtime configuration object; several
                   attributes are set on it here for downstream use
    :return: None (errors are reported via logger_cli and abort early)
    """
    # logs collection is supported on Kube environments only
    args_utils.check_supported_env(ENV_TYPE_KUBE, args, config)
    # resolve output filename, falling back to a default name
    _logsfile = "mos_logs.log" if not args.file else args.file
    logger_cli.info("# Output file is '{}'".format(_logsfile))

    # QA resource preparation is not needed for log collection
    config.prepare_qa_resources = False
    # path to dump logs that are not detected by any regex
    config.dumppath = args_utils.get_arg(args, "dump_undetected")
    if config.dumppath != "null" and \
       not os.path.exists(config.dumppath):
        logger_cli.error(
            "ERROR: Path to dump not parsable logs not found: '{}'".format(
                config.dumppath
            )
        )
        return
    # int() guards against a string value coming straight from the CLI
    config.tail_lines = int(args_utils.get_arg(args, "tail"))
    ml = sage.KubeMosLogger(config)

    # pod selection filters supplied on the command line
    namespaces = args_utils.get_arg(args, "ns")
    pod_masks = args_utils.get_arg(args, "pod_mask")
    pods_inclusive = args_utils.get_arg(args, "pods_inclusive")
    exclude_keywords = args_utils.get_arg(args, "exclude")
    exclude_keywords = exclude_keywords if exclude_keywords else []
    # 'cleaner' pods are always excluded from the results
    exclude_keywords += ["cleaner"]

    # Prepare pod names list for log collection
    _plist = ml.prepare_pods(
        namespaces,
        pod_masks,
        inclusive_filter=pods_inclusive,
        exclude_kw=exclude_keywords
    )
    # Collect logs
    ml.collect_logs(_plist)
    # Parse logs
    ml.parse_logs()
    # Merge them using timestamp
    ml.merge_logs()
    # Save resulting file
    ml.save_logs(_logsfile)

    return