Updates for ceph module regarding info gathering

   Related-PROD: PROD-36845

Change-Id: I1903af0c3ff8bc9c7d047c06917fc5bffa733224
diff --git a/cfg_checker/modules/ceph/__init__.py b/cfg_checker/modules/ceph/__init__.py
index 29b9a29..e2f0049 100644
--- a/cfg_checker/modules/ceph/__init__.py
+++ b/cfg_checker/modules/ceph/__init__.py
@@ -1,5 +1,7 @@
 #    Author: Alex Savatieiev (osavatieiev@mirantis.com; a.savex@gmail.com)
 #    Copyright 2019-2022 Mirantis, Inc.
+from datetime import datetime
+
 from cfg_checker.agent.fio_runner import get_fio_options
 from cfg_checker.agent.fio_runner import seq_modes, mix_modes
 from cfg_checker.common import logger_cli
@@ -7,6 +9,7 @@
 from cfg_checker.helpers import args_utils
 from cfg_checker.modules.ceph import info, bench
 
+
 command_help = "Ceph Storage information and benchmarks"
 supported_envs = [ENV_TYPE_KUBE]
 
@@ -54,17 +57,18 @@
     )
 
     ceph_info_parser.add_argument(
-        '--tgz',
-        metavar='ceph_tgz_filename',
-        help="TGZ archive filename to save gathered data"
+        '--client-name',
+        metavar='client_name',
+        help="Client name for archive naming"
     )
 
-    ceph_report_parser = ceph_subparsers.add_parser(
-        'report',
-        help="Generate Ceph Info report"
+    ceph_info_parser.add_argument(
+        '--project-name',
+        metavar='project_name',
+        help="Project name for archive naming"
     )
 
-    ceph_report_parser.add_argument(
+    ceph_info_parser.add_argument(
         '--html',
         metavar='ceph_html_filename',
         help="HTML filename to save report"
@@ -179,11 +183,22 @@
     # Ceph info
     # Gather ceph info and create an archive with data
     args_utils.check_supported_env(ENV_TYPE_KUBE, args, config)
-    # check tgz
-    _tgzfile = "ceph_info_archive.tgz" if not args.tgz else args.tgz
 
+    # check client and project names
+    if not args.client_name or not args.project_name:
+        logger_cli.error(
+            "ERROR: Missing '--client-name' or '--project-name' options"
+        )
+        return
     # _class = _selectClass(_env)
     ceph_info = info.KubeCephInfo(config)
+    _tgzfilename = ceph_info.get_info_archive_filename(
+        args.client_name,
+        args.project_name
+    )
+    logger_cli.info("# Archive will be generated to '{}'".format(_tgzfilename))
+    # get html
+    _htmlfilename = args_utils.get_arg(args, 'html')
 
     logger_cli.info("# Collecting Ceph cluster information")
     ceph_info.gather_info()
@@ -195,30 +210,13 @@
     # ceph_info.load_info()
     # end debug
 
-    ceph_info.generate_archive(_tgzfile)
-    ceph_info.print_summary()
-
-    return
-
-
-def do_report(args, config):
-    # Ceph Report
-    # Gather ceph info and create HTML report with all of the data
-    args_utils.check_supported_env(ENV_TYPE_KUBE, args, config)
-    _filename = args_utils.get_arg(args, 'html')
-    logger_cli.info("# Ceph cluster Configuration report")
-
-    # _class = _selectClass(_env)
-    ceph_info = info.KubeCephInfo(config)
-    # Debug, enable if needed to debug report generation
-    # without actuall data collecting each time
-    # ceph_info.load_info()
-    # end debug
-    ceph_info.gather_info()
-    ceph_info.gather_osd_configs()
     ceph_info.get_transposed_latency_table()
     ceph_info.get_latest_health_readout()
-    ceph_info.create_html_report(_filename)
+    ceph_info.create_html_report(_htmlfilename)
+
+    # handle cli part
+    ceph_info.generate_archive(_tgzfilename)
+    ceph_info.print_summary()
 
     return
 
diff --git a/cfg_checker/modules/ceph/info.py b/cfg_checker/modules/ceph/info.py
index 0eb1f15..a3006af 100644
--- a/cfg_checker/modules/ceph/info.py
+++ b/cfg_checker/modules/ceph/info.py
@@ -6,7 +6,7 @@
 import tarfile
 import io
 from time import sleep
-
+from datetime import datetime
 
 from cfg_checker.common import logger_cli
 from cfg_checker.common.exception import KubeException
@@ -25,6 +25,22 @@
         self.env_config = config
         return
 
+    def get_info_archive_filename(self, client, project):
+        # prefill known data
+        _tags = ["CephCollectData"]
+        _tags.append(client)
+        _tags.append(project)
+
+        # generate date for tgz
+        _file_datetime_fmt = "%Y-%m-%d"
+        _dt = datetime.now().strftime(_file_datetime_fmt)
+        _tags.append(_dt)
+
+        # extension
+        _tags.append("tar")
+        _tags.append("gz")
+        return ".".join(_tags)
+
     def get_transposed_latency_table(self):
         _table = {
             "<dev>": []
@@ -164,6 +180,9 @@
             self.ceph_info = json.load(_f)
 
     def generate_archive(self, tgzfilename):
+        def _ensure_fname(ext):
+            return key + ext if _fname is None else _fname
+
         if not self.ceph_info:
             logger_cli.warning(
                 "WARNING: Ceph Info Data not detected. "
@@ -178,18 +197,23 @@
             )
             # Iterate every key and write data to tar file
             for key, d in self.ceph_info.items():
-                _filename = None
+                _fname = None
                 # Cast buf to a proper type
                 _buf = None
+                if "filename" in d:
+                    _fname = d["filename"]
                 if isinstance(d["data"], dict) or isinstance(d["data"], list):
                     _buf = json.dumps(d["data"], indent=2)
-                    _filename = key + ".json"
+                    # fall back to "<key>.json" when no explicit filename was set
+                    _filename = _ensure_fname(".json")
                 elif isinstance(d["data"], str):
                     _buf = d["data"]
-                    _filename = key + ".txt"
+                    # _filename = key + ".txt"
+                    _filename = _ensure_fname(".txt")
                 else:
                     _buf = str(d["data"])
-                    _filename = key + ".txt"
+                    # _filename = key + ".txt"
+                    _filename = _ensure_fname(".txt")
                 logger_cli.debug("... writing '{}'".format(_filename))
                 _tgz.add_file(_filename, buf=_buf, replace=True)
 
@@ -332,7 +356,8 @@
             logger_cli.debug("... found '{}'".format(_names[0]))
         return _names[0]
 
-    def _add_ceph_info_item(self, key, title, data):
+    def _add_ceph_info_item(self, key, title, data, filename=None):
+        # handle data
         if key in self.ceph_info:
             self.ceph_info[key]["title"] = title
             self.ceph_info[key]["data"] = data
@@ -341,6 +366,8 @@
                 "title": title,
                 "data": data
             }
+        if filename:
+            self.ceph_info[key]["filename"] = filename
 
     def _parse_dev_classes(self, deviceClasses):
         _devClasses = []
@@ -439,13 +466,15 @@
         self._add_ceph_info_item(
             "crushmap_json",
             "Crush Map (json)",
-            _cj("crushtool -i " + _cmap_tmp_path + " --dump")
+            _cj("crushtool -i " + _cmap_tmp_path + " --dump"),
+            filename="crushmap.json"
         )
         # _crushmap = _cj("crushtool -i " + _cmap_tmp_path + " --dump")
         self._add_ceph_info_item(
             "crushmap_text",
             "Crush Map (text)",
-            _c("crushtool -d " + _cmap_tmp_path)
+            _c("crushtool -d " + _cmap_tmp_path),
+        filename="crushmap.txt"
         )
 
         logger_cli.info("-> Collecting ceph osd crush dump")