save_jenkins_console_output scripts
Change-Id: Ia109a9b9be325a416aec88d077ec29afa40781f7
Related-PROD: PROD-30280
diff --git a/save_jenkins_console/README.md b/save_jenkins_console/README.md
new file mode 100644
index 0000000..90a6a53
--- /dev/null
+++ b/save_jenkins_console/README.md
@@ -0,0 +1,5 @@
+# jenkins_console_output
+
+Scripts get Jenkins console outputs from jobs and save them to files.
+Possible to archive old files.
+Delete old archives.
\ No newline at end of file
diff --git a/save_jenkins_console/base.py b/save_jenkins_console/base.py
new file mode 100644
index 0000000..c42c7f9
--- /dev/null
+++ b/save_jenkins_console/base.py
@@ -0,0 +1,12 @@
+import time
+
+from config import JOBS_FOR_GETTING_LOGS_FROM_OUTPUT
+from jenkins_api import gathering_data_from_jenkins_all_jenkins_job
+
+
+def main():
+ gathering_data_from_jenkins_all_jenkins_job(all_jenkins_job=JOBS_FOR_GETTING_LOGS_FROM_OUTPUT)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/save_jenkins_console/console_get_reports.sh b/save_jenkins_console/console_get_reports.sh
new file mode 100755
index 0000000..05eaf5a
--- /dev/null
+++ b/save_jenkins_console/console_get_reports.sh
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+source /home/ubuntu/programs/jenkins_console_output/.venv/bin/activate
+python /home/ubuntu/programs/jenkins_console_output/genetate_report_html.py
+rm -fr /tmp/oscore_qa_logs/*
diff --git a/save_jenkins_console/requirements.txt b/save_jenkins_console/requirements.txt
new file mode 100644
index 0000000..228c7cc
--- /dev/null
+++ b/save_jenkins_console/requirements.txt
@@ -0,0 +1,7 @@
+jenkinsapi>=0.3.6
+ipdb>=0.11
+requests>=2.21.0
+tinydb>=3.12.2
+jinja2>=2.10
+gevent>=1.4.0
+mysql-connector-python>=8.0.15
\ No newline at end of file
diff --git a/save_jenkins_console/save_jenkins_console/config.py b/save_jenkins_console/save_jenkins_console/config.py
new file mode 100644
index 0000000..a582c5b
--- /dev/null
+++ b/save_jenkins_console/save_jenkins_console/config.py
@@ -0,0 +1,55 @@
+# Jenkins API credentials (NOTE(review): plaintext secrets in source — consider env vars or a secrets store)
+USERNAME = 'mcp-oscore-jenkins'
+PASSWORD = 'ahvoNg4mae'
+JENKINS_URL = 'https://ci.mcp.mirantis.net'
+
+# Jenkins jobs names for https://ci.mcp.mirantis.net
+# For example https://ci.mcp.mirantis.net/oscore-tempest-runner
+JOBS_FOR_GETTING_LOGS_FROM_OUTPUT = ['oscore-tempest-runner',
+ 'oscore-oscc-ci',
+ 'oscore-test-cookiecutter-model',
+ 'rollout-mcp-env',
+ 'oscore-promote-openstack-pike-xenial',
+ 'oscore-promote-openstack-queens-xenial',
+ 'oscore-tempest-results-checker',
+ 'run-deploy-job-on-cfg01',
+ 'deploy-heat-virtual_mcp11_aio',
+ 'oscore-artifatcts-collector',
+ 'oscore-formula-systest-virtual_mcp11_aio-queens',
+ 'oscore-formula-systest-virtual_mcp11_aio-newton',
+ 'oscore-formula-systest-virtual_mcp11_aio-pike',
+ 'oscore-formula-systest-virtual_mcp11_aio-mitaka',
+ 'oscore-test-openstack-upgrade-pike-queens-core-ssl',
+ 'oscore-test-openstack-upgrade-ocata-pike-core',
+ 'oscore-test-openstack-upgrade-pike-queens-core-extra-ssl',
+ 'oscore-test-openstack-upgrade-pike-queens-core-barbican',
+ 'create-heat-stack-for-mcp-env',
+ ]
+
+# For database_to_files.py script
+# Gets data from databases and stores them to each directory test file
+# where to unpack logs from db
+
+# LOGS_DIRECTORY = '/var/www/oscore_jobs.com/html/oscore_logs/'
+LOGS_DIRECTORY = '/home/serhii/my_projects/jenkins_console_output/logs/'
+
+# Age threshold in days: files older than this are considered old
+FILES_OLDER_THAN = 33
+# directory where databases are
+
+# Parameters for create_index_html.py
+PATCH_TO_INDEX_TEMPLATE_HTML = '/home/ubuntu/programs/jenkins_console_output/templates/index_template.html'
+PATCH_TO_OUTPUT_INDEX_HTML = '/var/www/oscore_jobs.com/html/index.html'
+
+# Parameters for create_html_report_table.py
+PATCH_TO_REPORT_TABLE_TEMPLATE_HTML = '/home/ubuntu/programs/jenkins_console_output/templates/report_template.html'
+PATCH_TO_OUTPUT_REPORT_TABLE_HTML = '/tmp/output_report.html'
+
+# For logging all the data what is happening in this script
+LOGGIGNG_FOLDER = '/tmp/oscore_qa_logs/'
+LOGGIGNG_CREATE_THML_INDEX = 'create_thml_index.log'
+LOGGIGNG_CREATE_HTML_REPORT_TABLE = 'create_html_report_table.log'
+# LOGGIGNG_DATABASE_TO_FILES = 'database_to_files.log'
+# LOGGIGNG_DELETE_DB_OLD_FILES = 'delete_db_old_files.log'
+LOGGIGNG_JENKINS_API = 'jenkins_api.log'
+LOGGIGNG_UTILS = 'utils.log'
diff --git a/save_jenkins_console/save_jenkins_console/jenkins_api.py b/save_jenkins_console/save_jenkins_console/jenkins_api.py
new file mode 100644
index 0000000..c20c5a0
--- /dev/null
+++ b/save_jenkins_console/save_jenkins_console/jenkins_api.py
@@ -0,0 +1,171 @@
+import config
+import requests
+import time
+import utils
+
+from jenkinsapi import custom_exceptions
+from jenkinsapi.jenkins import Jenkins
+from jenkinsapi.utils.crumb_requester import CrumbRequester
+
+import logging
+
+from config import LOGGIGNG_FOLDER
+from config import LOGGIGNG_JENKINS_API
+from config import LOGS_DIRECTORY
+from config import JOBS_FOR_GETTING_LOGS_FROM_OUTPUT
+
+logging.basicConfig(format='[%(asctime)s][%(name)s][%(levelname)s] %(message)s',
+ datefmt='%d-%m-%Y %H:%M:%S',
+ handlers=[
+ logging.FileHandler('{}{}'.format(LOGGIGNG_FOLDER, LOGGIGNG_JENKINS_API)),
+ logging.StreamHandler()],
+ level=logging.INFO)
+logger = logging.getLogger('jenkins_api')
+
+
+class GetJobsResults:
+ """
+ Working with Jenkins API and tiny DB
+ """
+ def __init__(self, pipeline_job_name):
+ self.server = Jenkins(config.JENKINS_URL,
+ username=config.USERNAME,
+ password=config.PASSWORD,
+ requester=CrumbRequester(
+ username=config.USERNAME,
+ password=config.PASSWORD,
+ baseurl=config.JENKINS_URL))
+ self.job = self.server.get_job(pipeline_job_name)
+ self.pipeline_job_name = pipeline_job_name
+
+
+ def get_all_jobs_ids(self):
+ """
+        Gets all the build IDs for the particular job name.
+
+ For example:
+ Pipeline_job_name: oscore-artifatcts-collector
+ [6150, 6149, 6148, ..., 6121]
+
+        :return: list. Build IDs
+ """
+ builds_ids = self.job.get_build_ids()
+ logger.info('Getting all builds ids: {}'.format(self.pipeline_job_name))
+ return list(builds_ids)
+
+ def get_job_build(self, build_id):
+ try:
+ job_build = self.job.get_build(build_id)
+ return job_build
+ except requests.exceptions.HTTPError:
+ logger.warning('404 Client Error: Not Found for url')
+
+
+ def get_build_artifacts(self, jobs_ids_list):
+ """
+        Gets all the build artifacts for the build in a Jenkins Job
+ Saves it to DB
+
+ :param jobs_ids_list: List of the IDs for the particular job
+ :return: None if Ok
+ """
+ logger.info('Pipeline job name, jobs IDs list: {} {}'.format(self.pipeline_job_name, jobs_ids_list))
+ build_counter = 1
+ for build in jobs_ids_list:
+ patch_to_file = utils.get_patch_to_file(
+ job_name=self.pipeline_job_name, build_id=build)
+ if not utils.check_if_file_exists(patch_to_file=patch_to_file):
+                # If the build's log file does not exist yet, fetch and save it
+ try:
+ job_build = self.get_job_build(build_id=build)
+
+ # Build status FAILURE, ABORTED, SUCCESS, None
+ # Build status None means 'In progress'
+ job_build_status = job_build.get_status()
+ job_console = job_build.get_console()
+
+ if job_build and job_build_status:
+ # Check if Build status is not None than job is not in progress, finished
+ logger.info("Saving Buid to file: {}: {} build: {} from {}".format(
+ self.pipeline_job_name,
+ build,
+ build_counter,
+ len(jobs_ids_list)))
+ build_counter += 1
+
+ # When job finished
+ job_timestamp = job_build.get_timestamp().timestamp()
+ build_date = utils.get_date_from_timestamp(job_timestamp)
+
+                    # Save data to the file
+ utils.save_job_console_to_file(
+ logs_directory=LOGS_DIRECTORY,
+ job_name=self.pipeline_job_name,
+ build_id=build,
+ build_date=build_date,
+ build_status=job_build_status,
+ data_to_write=job_console)
+
+ except custom_exceptions.NotFound:
+ logger.warning("Couldn't find a build: {}: {}".format(self.pipeline_job_name, build))
+ continue
+ continue
+ logger.warning("Jenkins log output already saved: {}: {} build: {} from {}".format(
+ self.pipeline_job_name,
+ build,
+ build_counter,
+ len(jobs_ids_list)))
+ build_counter +=1
+
+ continue
+ return
+
+
+def gathering_data_from_jenkins_all_jenkins_job(all_jenkins_job):
+ """
+    Gets console output from Jenkins jobs and saves it to log files
+    :param all_jenkins_job: list of Jenkins job names
+ :return: None if Ok
+ """
+ logger.info('Gathering data from Jenkins')
+ for pj in all_jenkins_job:
+ try:
+ jr = GetJobsResults(pj)
+ all_jobs_ids = jr.get_all_jobs_ids()
+ jr.get_build_artifacts(all_jobs_ids)
+ except requests.exceptions.ConnectionError:
+ logger.warning("Got an exception. ConnectionError. Too many API requests waiting for 700 sec")
+ time.sleep(700)
+ continue
+
+
+def gathering_data_from_jenkins_one_jenkins_job(one_jenkins_job):
+ """
+    Gets console output from one Jenkins job and saves it to log files
+    :param one_jenkins_job: string. Jenkins job name
+ :return: None if Ok
+ """
+ logger.info('Gathering data from Jenkins parallel')
+ try:
+ jr = GetJobsResults(one_jenkins_job)
+ all_jobs_ids = jr.get_all_jobs_ids()
+ jr.get_build_artifacts(all_jobs_ids)
+ except requests.exceptions.ConnectionError:
+ logger.warning("Got an exception. ConnectionError. Too many API requests waiting for 700 sec")
+ time.sleep(700)
+
+
+def get_one_jenkins_job_one_id(jenkins_job_name, job_id):
+ logger.info('Getting one Jenkins job: {}: {}'.format(jenkins_job_name, job_id))
+ try:
+ jr = GetJobsResults(jenkins_job_name)
+ jr.get_build_artifacts([int(job_id)])
+ except requests.exceptions.ConnectionError:
+ logger.warning("Got an exception. ConnectionError. Too many API requests waiting for 700 sec")
+
+ time.sleep(700)
+
+
+if __name__ == '__main__':
+ gathering_data_from_jenkins_all_jenkins_job(JOBS_FOR_GETTING_LOGS_FROM_OUTPUT)
+
diff --git a/save_jenkins_console/save_jenkins_console/utils.py b/save_jenkins_console/save_jenkins_console/utils.py
new file mode 100644
index 0000000..50d323c
--- /dev/null
+++ b/save_jenkins_console/save_jenkins_console/utils.py
@@ -0,0 +1,189 @@
+import glob
+import os
+import logging
+import time
+
+from datetime import datetime
+
+from config import LOGGIGNG_FOLDER
+from config import LOGGIGNG_UTILS
+from config import LOGS_DIRECTORY
+
+logging.basicConfig(format='[%(asctime)s][%(name)s][%(levelname)s] %(message)s',
+ datefmt='%d-%m-%Y %H:%M:%S',
+ handlers=[
+ logging.FileHandler('{}{}'.format(LOGGIGNG_FOLDER, LOGGIGNG_UTILS)),
+ logging.StreamHandler()],
+ level=logging.INFO)
+logger = logging.getLogger('jenkins_api')
+
+
+def get_date_from_timestamp(timestamp):
+ if timestamp:
+ return datetime.utcfromtimestamp(timestamp).strftime('%d-%m-%Y_%H_%M_%S')
+ return ''
+
+
+def get_last_build_id_from_db(current_database):
+ """
+ get_last_build_id_from_db
+ :param current_database:
+ self.db = TinyDB(self.patch_to_db)
+ self.current_database = self.db.all()
+
+ :return: int. Last build
+ """
+ logger.info('Getting last build id from DB')
+ build_ids = []
+ for field in current_database:
+ build_ids.append(field['build']['id'])
+ # print(build_ids)
+ # logger.info('Last build id is: {} {}'.format(current_database, max(build_ids)))
+ return max(build_ids)
+
+
+def is_directory_exists(patch_to_directory):
+ if patch_to_directory and \
+ os.path.exists(patch_to_directory):
+ return True
+ return False
+
+
+def create_directory(patch_to_directory, directory_to_create=''):
+ """
+
+    :param patch_to_directory: string
+ :param directory_to_create: string
+
+ :return: full_patch_to_directory
+ """
+ full_patch_to_directory = patch_to_directory + directory_to_create
+
+ if not is_directory_exists(full_patch_to_directory):
+ logger.info('Creating directory: {}'.format(full_patch_to_directory))
+ os.makedirs(full_patch_to_directory)
+ return full_patch_to_directory
+
+
+def find_and_delete_old_files(patch_to_files, older_than):
+ """
+ Just finds files older than - by date
+
+ :param patch_to_files: string
+    :param older_than: int, e.g. 3 for three days
+
+ :return: list of matches files
+ """
+ logger.info('Find all files in {} older than {} days'.format(patch_to_files, older_than))
+
+ now = time.time()
+ cutoff = now - (int(older_than) * 86400)
+
+ files = [os.path.join(dp, f) for dp, dn, filenames in os.walk(patch_to_files) for f in filenames]
+
+ found_files = []
+ for xfile in files:
+ if os.path.isfile(xfile):
+ t = os.stat(xfile)
+ c = t.st_ctime
+
+ if c < cutoff:
+ logger.info('Deleting file: {}'.format(xfile))
+ os.remove(xfile)
+ found_files.append(xfile)
+ return files
+
+
+def save_to_file(filename, data_to_write):
+ """
+ Saves output to a file
+
+    :param filename: string. Full path of the file to write
+    :param data_to_write: string. Content to write;
+        non-ASCII characters are stripped
+    :return: filename
+ """
+ logger.info('Saving to a log file: {}'.format(filename))
+ data_to_write = data_to_write.encode('ascii', 'ignore').decode('ascii')
+ with open(filename, 'w') as f:
+ f.write(data_to_write)
+
+ return filename
+
+
+def get_patch_to_file(job_name, build_id):
+ if job_name:
+ patch_to_file = LOGS_DIRECTORY + job_name + '/' + str(build_id) + '*'
+ # logger.info(f'Getting patch to the file {patch_to_file}')
+ return patch_to_file
+ return ''
+
+
+def generate_filename(logs_directory,
+ build_id,
+ build_date,
+ job_name,
+ build_status):
+ logger.info(
+ 'Saving results to file: {}. Build result: {}'.format(
+ job_name, build_status))
+ filename = '{0}{1}/{2}_{3}_{4}_{5}.log'.format(logs_directory,
+ job_name,
+ build_id,
+ build_date,
+ job_name,
+ build_status)
+ return filename
+
+
+def check_if_file_exists(patch_to_file):
+ """
+ Checking if a File Exists
+
+ :param patch_to_file: string
+ :return: Bool
+ """
+ # logger.info('Checking if File Exists: {}'.format(patch_to_file))
+ if patch_to_file:
+ if glob.glob(patch_to_file):
+ return True
+ return False
+
+
+def create_directories_for_logs(jobs_names, logs_directory):
+ """
+ Creates directories for logs
+
+ :param logs_directory: string. Base directory for logs
+ Inside this directory other directories will be created
+ :param jobs_names: list of string
+ :return: None
+ """
+ for job_name in jobs_names:
+ logger.info('Creating directories for logs: {} {}'.format(logs_directory, job_name))
+ create_directory(logs_directory, job_name)
+
+
+def save_job_console_to_file(logs_directory, job_name, build_id, build_date,
+ build_status, data_to_write):
+ filename = generate_filename(logs_directory=logs_directory, job_name=job_name,
+ build_id=build_id, build_date=build_date,
+ build_status=build_status)
+
+ create_directory(patch_to_directory=logs_directory, directory_to_create=job_name)
+ save_to_file(filename=filename, data_to_write=data_to_write)
+
+
+def archive_files(files_to_archive, patch_to_archive):
+ """
+ Archives files
+
+ :param files_to_archive: list
+ :param patch_to_archive: string
+
+ :return: full patch + name to archive
+ """
+
+ pass
+
+