Refactor the code of osccore-qa-testing-tools to comply with PEP8.
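
The changes are mechanical: single-quoted strings are normalized to double
quotes, long statements are wrapped to satisfy the line-length limit,
imports are regrouped, and spacing around operators is fixed. A minimal
sketch of how compliance could be re-checked locally, assuming flake8 is
installed (the change does not state which checker was used), is:

    import subprocess

    # Run flake8 over the refactored package; a non-zero exit status means
    # PEP8 violations remain. Using flake8 here is an assumption, not part
    # of this change.
    subprocess.run(
        ["flake8", "--max-line-length=79", "save_jenkins_console/"],
        check=True,
    )
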
Related-prod: PRODX-42195
Change-Id: Id05e7584e0d024127ce1bd5042cfe681a1b52e2d
diff --git a/save_jenkins_console/base.py b/save_jenkins_console/base.py
index c42c7f9..de6b5bb 100755
--- a/save_jenkins_console/base.py
+++ b/save_jenkins_console/base.py
@@ -1,12 +1,12 @@
-import time
-
from config import JOBS_FOR_GETTING_LOGS_FROM_OUTPUT
from jenkins_api import gathering_data_from_jenkins_all_jenkins_job
def main():
- gathering_data_from_jenkins_all_jenkins_job(all_jenkins_job=JOBS_FOR_GETTING_LOGS_FROM_OUTPUT)
+ gathering_data_from_jenkins_all_jenkins_job(
+ all_jenkins_job=JOBS_FOR_GETTING_LOGS_FROM_OUTPUT
+ )
-if __name__ == '__main__':
+if __name__ == "__main__":
main()
diff --git a/save_jenkins_console/save_jenkins_console/config.py b/save_jenkins_console/save_jenkins_console/config.py
index b0f6cb9..93bee76 100755
--- a/save_jenkins_console/save_jenkins_console/config.py
+++ b/save_jenkins_console/save_jenkins_console/config.py
@@ -1,44 +1,44 @@
# Jenkins API credentials
-USERNAME = 'mcp-oscore-jenkins'
-PASSWORD = 'ahvoNg4mae'
-JENKINS_URL = 'https://ci.mcp.mirantis.net'
+USERNAME = "mcp-oscore-jenkins"
+PASSWORD = "ahvoNg4mae"
+JENKINS_URL = "https://ci.mcp.mirantis.net"
# Jenkins job names for https://ci.mcp.mirantis.net
# For example https://ci.mcp.mirantis.net/oscore-tempest-runner
JOBS_FOR_GETTING_LOGS_FROM_OUTPUT = [
- 'oscore-tempest-runner',
- 'oscore-oscc-ci',
- 'oscore-test-cookiecutter-model',
- 'rollout-mcp-env',
- 'oscore-promote-openstack-pike-xenial',
- 'oscore-promote-openstack-queens-xenial',
- 'oscore-tempest-results-checker',
- 'run-deploy-job-on-cfg01',
- 'run-job-on-cfg01-jenkins',
- 'deploy-heat-virtual_mcp11_aio',
- 'oscore-artifatcts-collector',
- 'oscore-formula-systest-virtual_mcp11_aio-queens',
- 'oscore-formula-systest-virtual_mcp11_aio-newton',
- 'oscore-formula-systest-virtual_mcp11_aio-pike',
- 'oscore-formula-systest-virtual_mcp11_aio-mitaka',
- 'oscore-test-openstack-upgrade-pike-queens-core-ssl',
- 'oscore-test-openstack-upgrade-ocata-pike-core',
- 'oscore-test-openstack-upgrade-pike-queens-core-extra-ssl',
- 'oscore-test-openstack-upgrade-pike-queens-core-barbican',
- 'create-heat-stack-for-mcp-env',
- ]
+ "oscore-tempest-runner",
+ "oscore-oscc-ci",
+ "oscore-test-cookiecutter-model",
+ "rollout-mcp-env",
+ "oscore-promote-openstack-pike-xenial",
+ "oscore-promote-openstack-queens-xenial",
+ "oscore-tempest-results-checker",
+ "run-deploy-job-on-cfg01",
+ "run-job-on-cfg01-jenkins",
+ "deploy-heat-virtual_mcp11_aio",
+ "oscore-artifatcts-collector",
+ "oscore-formula-systest-virtual_mcp11_aio-queens",
+ "oscore-formula-systest-virtual_mcp11_aio-newton",
+ "oscore-formula-systest-virtual_mcp11_aio-pike",
+ "oscore-formula-systest-virtual_mcp11_aio-mitaka",
+ "oscore-test-openstack-upgrade-pike-queens-core-ssl",
+ "oscore-test-openstack-upgrade-ocata-pike-core",
+ "oscore-test-openstack-upgrade-pike-queens-core-extra-ssl",
+ "oscore-test-openstack-upgrade-pike-queens-core-barbican",
+ "create-heat-stack-for-mcp-env",
+]
# For database_to_files.py script
# Gets data from databases and stores it in each directory's test file
# where to unpack logs from the DB
-LOGS_DIRECTORY = '/var/www/oscore_jobs.com/html/oscore_logs/'
+LOGS_DIRECTORY = "/var/www/oscore_jobs.com/html/oscore_logs/"
# delete files older than this many days
FILES_OLDER_THAN = 33
# For logging everything that happens in this script
-LOGGIGNG_FOLDER = '/tmp/oscore_qa_logs/'
-LOGGIGNG_JENKINS_API = 'jenkins_api.log'
-LOGGIGNG_UTILS = 'utils.log'
-LOG_FILENAME = 'manage_files.log'
+LOGGIGNG_FOLDER = "/tmp/oscore_qa_logs/"
+LOGGIGNG_JENKINS_API = "jenkins_api.log"
+LOGGIGNG_UTILS = "utils.log"
+LOG_FILENAME = "manage_files.log"
diff --git a/save_jenkins_console/save_jenkins_console/jenkins_api.py b/save_jenkins_console/save_jenkins_console/jenkins_api.py
index dced5e8..7b7a35a 100755
--- a/save_jenkins_console/save_jenkins_console/jenkins_api.py
+++ b/save_jenkins_console/save_jenkins_console/jenkins_api.py
@@ -1,42 +1,51 @@
-import config
+import logging
import os
-import requests
import time
-import utils
+import config
+import requests
+import utils
+from config import (
+ JOBS_FOR_GETTING_LOGS_FROM_OUTPUT,
+ LOGGIGNG_FOLDER,
+ LOGGIGNG_JENKINS_API,
+ LOGS_DIRECTORY,
+)
from jenkinsapi import custom_exceptions
from jenkinsapi.jenkins import Jenkins
from jenkinsapi.utils.crumb_requester import CrumbRequester
-
-import logging
-
-from config import LOGGIGNG_FOLDER
-from config import LOGGIGNG_JENKINS_API
-from config import LOGS_DIRECTORY
-from config import JOBS_FOR_GETTING_LOGS_FROM_OUTPUT
from manage_files import delete_old_files
-logging.basicConfig(format='[%(asctime)s][%(name)s][%(levelname)s] %(message)s',
- datefmt='%d-%m-%Y %H:%M:%S',
- handlers=[
- logging.FileHandler('{}{}'.format(LOGGIGNG_FOLDER, LOGGIGNG_JENKINS_API)),
- logging.StreamHandler()],
- level=logging.INFO)
-logger = logging.getLogger('jenkins_api')
+logging.basicConfig(
+ format="[%(asctime)s][%(name)s][%(levelname)s] %(message)s",
+ datefmt="%d-%m-%Y %H:%M:%S",
+ handlers=[
+ logging.FileHandler(
+ "{}{}".format(LOGGIGNG_FOLDER, LOGGIGNG_JENKINS_API)
+ ),
+ logging.StreamHandler(),
+ ],
+ level=logging.INFO,
+)
+logger = logging.getLogger("jenkins_api")
class GetJobsResults:
"""
Working with the Jenkins API and TinyDB
"""
+
def __init__(self, pipeline_job_name):
- self.server = Jenkins(config.JENKINS_URL,
- username=config.USERNAME,
- password=config.PASSWORD,
- requester=CrumbRequester(
- username=config.USERNAME,
- password=config.PASSWORD,
- baseurl=config.JENKINS_URL))
+ self.server = Jenkins(
+ config.JENKINS_URL,
+ username=config.USERNAME,
+ password=config.PASSWORD,
+ requester=CrumbRequester(
+ username=config.USERNAME,
+ password=config.PASSWORD,
+ baseurl=config.JENKINS_URL,
+ ),
+ )
self.job = self.server.get_job(pipeline_job_name)
self.pipeline_job_name = pipeline_job_name
@@ -51,7 +60,9 @@
:return: list. Build IDs
"""
builds_ids = self.job.get_build_ids()
- logger.info('Getting all builds ids: {}'.format(self.pipeline_job_name))
+ logger.info(
+ "Getting all builds ids: {}".format(self.pipeline_job_name)
+ )
return list(builds_ids)
def get_job_build(self, build_id):
@@ -59,7 +70,7 @@
job_build = self.job.get_build(build_id)
return job_build
except requests.exceptions.HTTPError:
- logger.warning('404 Client Error: Not Found for url')
+ logger.warning("404 Client Error: Not Found for url")
def manage_build_artifacts(self, job_build, build, build_date):
build_artifacts = job_build.get_artifacts()
@@ -68,9 +79,11 @@
if build_artfact:
patch_to_artifact_file = utils.generate_patch_to_artifact_file(
logs_directory=LOGS_DIRECTORY,
- job_name=self.pipeline_job_name)
+ job_name=self.pipeline_job_name,
+ )
saved_artifact_file_patch = build_artfact.save_to_dir(
- patch_to_artifact_file)
+ patch_to_artifact_file
+ )
# rename saved file
new_artifact_file_patch = utils.generate_artifact_file_patch(
@@ -78,14 +91,22 @@
patch_to_artifact_file=patch_to_artifact_file,
pipeline_job_name=self.pipeline_job_name,
build_id=build,
- build_date=build_date)
- if not utils.check_if_file_exists(patch_to_file=new_artifact_file_patch):
+ build_date=build_date,
+ )
+ if not utils.check_if_file_exists(
+ patch_to_file=new_artifact_file_patch
+ ):
new_artifact_filename = utils.rename_artifact_file(
old_artifact_file_patch=saved_artifact_file_patch,
- new_artifact_file_patch=new_artifact_file_patch)
- logger.info(f'new_artifact_filename: {new_artifact_filename}')
+ new_artifact_file_patch=new_artifact_file_patch,
+ )
+ logger.info(
+ f"new_artifact_filename: {new_artifact_filename}"
+ )
- if utils.check_if_file_exists(patch_to_file=saved_artifact_file_patch):
+ if utils.check_if_file_exists(
+ patch_to_file=saved_artifact_file_patch
+ ):
os.remove(saved_artifact_file_patch)
def get_build_artifacts(self, jobs_ids_list):
@@ -96,11 +117,16 @@
:param jobs_ids_list: List of the IDs for the particular job
:return: None if Ok
"""
- logger.info('Pipeline job name, jobs IDs list: {} {}'.format(self.pipeline_job_name, jobs_ids_list))
+ logger.info(
+ "Pipeline job name, jobs IDs list: {} {}".format(
+ self.pipeline_job_name, jobs_ids_list
+ )
+ )
build_counter = 1
for build in jobs_ids_list:
patch_to_file = utils.get_patch_to_file(
- job_name=self.pipeline_job_name, build_id=build)
+ job_name=self.pipeline_job_name, build_id=build
+ )
if not utils.check_if_file_exists(patch_to_file=patch_to_file):
# If a build ID is not in the DB then add it
try:
@@ -112,22 +138,31 @@
job_console = job_build.get_console()
if job_build and job_build_status:
- # Check if Build status is not None than job is not in progress, finished
- logger.info("Saving Buid to file: {}: {} build: {} from {}".format(
- self.pipeline_job_name,
- build,
- build_counter,
- len(jobs_ids_list)))
+ # If the build status is not None, the job is no longer
+ # in progress, i.e. it has finished
+ logger.info(
+ "Saving Buid to file: {}: {} "
+ "build: {} from {}".format(
+ self.pipeline_job_name,
+ build,
+ build_counter,
+ len(jobs_ids_list),
+ )
+ )
build_counter += 1
# When job is finished
job_timestamp = job_build.get_timestamp().timestamp()
- build_date = utils.get_date_from_timestamp(job_timestamp)
+ build_date = utils.get_date_from_timestamp(
+ job_timestamp
+ )
# save build artifacts
- self.manage_build_artifacts(job_build=job_build,
- build=build,
- build_date=build_date)
+ self.manage_build_artifacts(
+ job_build=job_build,
+ build=build,
+ build_date=build_date,
+ )
# Save data to the file
utils.save_job_console_to_file(
@@ -136,18 +171,27 @@
build_id=build,
build_date=build_date,
build_status=job_build_status,
- data_to_write=job_console)
+ data_to_write=job_console,
+ )
except custom_exceptions.NotFound:
- logger.warning("Couldn't find a build: {}: {}".format(self.pipeline_job_name, build))
+ logger.warning(
+ "Couldn't find a build: {}: {}".format(
+ self.pipeline_job_name, build
+ )
+ )
continue
continue
- logger.warning("Jenkins log output already saved: {}: {} build: {} from {}".format(
- self.pipeline_job_name,
- build,
- build_counter,
- len(jobs_ids_list)))
- build_counter +=1
+ logger.warning(
+ "Jenkins log output already saved: {}: "
+ "{} build: {} from {}".format(
+ self.pipeline_job_name,
+ build,
+ build_counter,
+ len(jobs_ids_list),
+ )
+ )
+ build_counter += 1
continue
return
@@ -159,14 +203,17 @@
:param all_jenkins_job: list
:return: None if Ok
"""
- logger.info('Gathering data from Jenkins')
+ logger.info("Gathering data from Jenkins")
for pj in all_jenkins_job:
try:
jr = GetJobsResults(pj)
all_jobs_ids = jr.get_all_jobs_ids()
jr.get_build_artifacts(all_jobs_ids)
except requests.exceptions.ConnectionError:
- logger.warning("Got an exception. ConnectionError. Too many API requests waiting for 700 sec")
+ logger.warning(
+ "Got an exception. ConnectionError. "
+ "Too many API requests waiting for 700 sec"
+ )
time.sleep(700)
continue
@@ -177,26 +224,37 @@
:param one_jenkins_job: str. Jenkins job name
:return: None if Ok
"""
- logger.info('Gathering data from Jenkins parallel')
+ logger.info("Gathering data from Jenkins parallel")
try:
jr = GetJobsResults(one_jenkins_job)
all_jobs_ids = jr.get_all_jobs_ids()
jr.get_build_artifacts(all_jobs_ids)
except requests.exceptions.ConnectionError:
- logger.warning("Got an exception. ConnectionError. Too many API requests waiting for 700 sec")
+ logger.warning(
+ "Got an exception. ConnectionError. "
+ "Too many API requests waiting for 700 sec"
+ )
time.sleep(700)
def get_one_jenkins_job_one_id(jenkins_job_name, job_id):
- logger.info('Getting one Jenkins job: {}: {}'.format(jenkins_job_name, job_id))
+ logger.info(
+ "Getting one Jenkins job: {}: {}".format(jenkins_job_name, job_id)
+ )
try:
jr = GetJobsResults(jenkins_job_name)
jr.get_build_artifacts([int(job_id)])
except requests.exceptions.ConnectionError:
- logger.warning("Got an exception. ConnectionError. Too many API requests waiting for 700 sec")
+ logger.warning(
+ "Got an exception. ConnectionError. "
+ "Too many API requests waiting for 700 sec"
+ )
time.sleep(700)
-if __name__ == '__main__':
- gathering_data_from_jenkins_all_jenkins_job(JOBS_FOR_GETTING_LOGS_FROM_OUTPUT)
+
+if __name__ == "__main__":
+ gathering_data_from_jenkins_all_jenkins_job(
+ JOBS_FOR_GETTING_LOGS_FROM_OUTPUT
+ )
delete_old_files(config.FILES_OLDER_THAN, config.LOGS_DIRECTORY)
diff --git a/save_jenkins_console/save_jenkins_console/manage_files.py b/save_jenkins_console/save_jenkins_console/manage_files.py
index 0836624..070f490 100755
--- a/save_jenkins_console/save_jenkins_console/manage_files.py
+++ b/save_jenkins_console/save_jenkins_console/manage_files.py
@@ -17,18 +17,24 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import config
import logging
import os
import time
+import config
+
logging.basicConfig(
- format='[%(asctime)s][%(name)s][%(levelname)s] %(message)s',
- datefmt='%d-%m-%Y %H:%M:%S',
- handlers=[logging.FileHandler('{}{}'.format(
- config.LOGGIGNG_FOLDER, config.LOG_FILENAME)), logging.StreamHandler()],
- level=logging.INFO)
-logger = logging.getLogger('delete_old_files')
+ format="[%(asctime)s][%(name)s][%(levelname)s] %(message)s",
+ datefmt="%d-%m-%Y %H:%M:%S",
+ handlers=[
+ logging.FileHandler(
+ "{}{}".format(config.LOGGIGNG_FOLDER, config.LOG_FILENAME)
+ ),
+ logging.StreamHandler(),
+ ],
+ level=logging.INFO,
+)
+logger = logging.getLogger("delete_old_files")
def delete_old_files(days, path):
@@ -36,7 +42,7 @@
time_secs = time.time() - (days * 86400)
for root, dirs, files in os.walk(path, topdown=False):
for file in files:
- full_path = os.path.join(root,file)
+ full_path = os.path.join(root, file)
stat = os.stat(full_path)
if stat.st_mtime <= time_secs:
logger.info("removing: {}".format(full_path))
diff --git a/save_jenkins_console/save_jenkins_console/utils.py b/save_jenkins_console/save_jenkins_console/utils.py
index 4a97455..4de84f3 100755
--- a/save_jenkins_console/save_jenkins_console/utils.py
+++ b/save_jenkins_console/save_jenkins_console/utils.py
@@ -1,27 +1,29 @@
import glob
-import os
import logging
+import os
import time
-
from datetime import datetime
-from config import LOGGIGNG_FOLDER
-from config import LOGGIGNG_UTILS
-from config import LOGS_DIRECTORY
+from config import LOGGIGNG_FOLDER, LOGGIGNG_UTILS, LOGS_DIRECTORY
-logging.basicConfig(format='[%(asctime)s][%(name)s][%(levelname)s] %(message)s',
- datefmt='%d-%m-%Y %H:%M:%S',
- handlers=[
- logging.FileHandler('{}{}'.format(LOGGIGNG_FOLDER, LOGGIGNG_UTILS)),
- logging.StreamHandler()],
- level=logging.INFO)
-logger = logging.getLogger('jenkins_api')
+logging.basicConfig(
+ format="[%(asctime)s][%(name)s][%(levelname)s] %(message)s",
+ datefmt="%d-%m-%Y %H:%M:%S",
+ handlers=[
+ logging.FileHandler("{}{}".format(LOGGIGNG_FOLDER, LOGGIGNG_UTILS)),
+ logging.StreamHandler(),
+ ],
+ level=logging.INFO,
+)
+logger = logging.getLogger("jenkins_api")
def get_date_from_timestamp(timestamp):
if timestamp:
- return datetime.utcfromtimestamp(timestamp).strftime('%d-%m-%Y_%H_%M_%S')
- return ''
+ return datetime.utcfromtimestamp(timestamp).strftime(
+ "%d-%m-%Y_%H_%M_%S"
+ )
+ return ""
def get_last_build_id_from_db(current_database):
@@ -33,23 +35,23 @@
:return: int. Last build
"""
- logger.info('Getting last build id from DB')
+ logger.info("Getting last build id from DB")
build_ids = []
for field in current_database:
- build_ids.append(field['build']['id'])
+ build_ids.append(field["build"]["id"])
# print(build_ids)
- # logger.info('Last build id is: {} {}'.format(current_database, max(build_ids)))
+ # logger.info('Last build id is: {} {}'
+ # .format(current_database, max(build_ids)))
return max(build_ids)
def is_directory_exists(patch_to_directory):
- if patch_to_directory and \
- os.path.exists(patch_to_directory):
+ if patch_to_directory and os.path.exists(patch_to_directory):
return True
return False
-def create_directory(patch_to_directory, directory_to_create=''):
+def create_directory(patch_to_directory, directory_to_create=""):
"""
:param full_patch_to_directory: string
@@ -60,7 +62,7 @@
full_patch_to_directory = patch_to_directory + directory_to_create
if not is_directory_exists(full_patch_to_directory):
- logger.info('Creating directory: {}'.format(full_patch_to_directory))
+ logger.info("Creating directory: {}".format(full_patch_to_directory))
os.makedirs(full_patch_to_directory)
return full_patch_to_directory
@@ -74,12 +76,20 @@
:return: list of matches files
"""
- logger.info('Find all files in {} older than {} days'.format(patch_to_files, older_than))
+ logger.info(
+ "Find all files in {} older than {} days".format(
+ patch_to_files, older_than
+ )
+ )
now = time.time()
cutoff = now - (int(older_than) * 86400)
- files = [os.path.join(dp, f) for dp, dn, filenames in os.walk(patch_to_files) for f in filenames]
+ files = [
+ os.path.join(dp, f)
+ for dp, dn, filenames in os.walk(patch_to_files)
+ for f in filenames
+ ]
found_files = []
for xfile in files:
@@ -88,7 +98,7 @@
c = t.st_ctime
if c < cutoff:
- logger.info('Deleting file: {}'.format(xfile))
+ logger.info("Deleting file: {}".format(xfile))
os.remove(xfile)
found_files.append(xfile)
return files
@@ -103,9 +113,9 @@
:param data_to_write:
:return:
"""
- logger.info('Saving to a log file: {}'.format(filename))
- data_to_write = data_to_write.encode('ascii', 'ignore').decode('ascii')
- with open(filename, 'w') as f:
+ logger.info("Saving to a log file: {}".format(filename))
+ data_to_write = data_to_write.encode("ascii", "ignore").decode("ascii")
+ with open(filename, "w") as f:
f.write(data_to_write)
return filename
@@ -113,26 +123,23 @@
def get_patch_to_file(job_name, build_id):
if job_name:
- patch_to_file = LOGS_DIRECTORY + job_name + '/' + str(build_id) + '*'
+ patch_to_file = LOGS_DIRECTORY + job_name + "/" + str(build_id) + "*"
# logger.info(f'Getting patch to the file {patch_to_file}')
return patch_to_file
- return ''
+ return ""
-def generate_filename(logs_directory,
- build_id,
- build_date,
- job_name,
- build_status):
+def generate_filename(
+ logs_directory, build_id, build_date, job_name, build_status
+):
logger.info(
- 'Saving results to file: {}. Build result: {}'.format(
- job_name, build_status))
- filename = '{0}{1}/{2}_{3}_{4}_{5}.txt'.format(logs_directory,
- job_name,
- build_id,
- build_date,
- job_name,
- build_status)
+ "Saving results to file: {}. Build result: {}".format(
+ job_name, build_status
+ )
+ )
+ filename = "{0}{1}/{2}_{3}_{4}_{5}.txt".format(
+ logs_directory, job_name, build_id, build_date, job_name, build_status
+ )
return filename
@@ -160,43 +167,61 @@
:return: None
"""
for job_name in jobs_names:
- logger.info('Creating directories for logs: {} {}'.format(logs_directory, job_name))
+ logger.info(
+ "Creating directories for logs: {} {}".format(
+ logs_directory, job_name
+ )
+ )
create_directory(logs_directory, job_name)
-def save_job_console_to_file(logs_directory, job_name, build_id, build_date,
- build_status, data_to_write):
- filename = generate_filename(logs_directory=logs_directory, job_name=job_name,
- build_id=build_id, build_date=build_date,
- build_status=build_status)
+def save_job_console_to_file(
+ logs_directory, job_name, build_id, build_date, build_status, data_to_write
+):
+ filename = generate_filename(
+ logs_directory=logs_directory,
+ job_name=job_name,
+ build_id=build_id,
+ build_date=build_date,
+ build_status=build_status,
+ )
- create_directory(patch_to_directory=logs_directory, directory_to_create=job_name)
+ create_directory(
+ patch_to_directory=logs_directory, directory_to_create=job_name
+ )
save_to_file(filename=filename, data_to_write=data_to_write)
def generate_patch_to_artifact_file(logs_directory, job_name):
- return '{0}{1}/'.format(logs_directory, job_name)
+ return "{0}{1}/".format(logs_directory, job_name)
-def generate_artifact_file_patch(saved_artifact_file_patch,
- patch_to_artifact_file,
- pipeline_job_name,
- build_id,
- build_date):
- if 'kubeconfig' in saved_artifact_file_patch:
- artifact_extention = 'txt'
- artifact_filename = saved_artifact_file_patch.split('/')[-1]
- filename = f'{str(build_id)}_{build_date}_{pipeline_job_name}_{artifact_filename}.{artifact_extention}'
+def generate_artifact_file_patch(
+ saved_artifact_file_patch,
+ patch_to_artifact_file,
+ pipeline_job_name,
+ build_id,
+ build_date,
+):
+ if "kubeconfig" in saved_artifact_file_patch:
+ artifact_extention = "txt"
+ artifact_filename = saved_artifact_file_patch.split("/")[-1]
+ filename = (
+ f"{str(build_id)}_{build_date}_{pipeline_job_name}"
+ f"_{artifact_filename}.{artifact_extention}"
+ )
full_patch = patch_to_artifact_file + filename
else:
- artifact_extention = saved_artifact_file_patch.split('.')[-1]
- filename = f'{str(build_id)}_{build_date}_{pipeline_job_name}.{artifact_extention}'
+ artifact_extention = saved_artifact_file_patch.split(".")[-1]
+ filename = (
+ f"{str(build_id)}_{build_date}_{pipeline_job_name}"
+ f".{artifact_extention}"
+ )
full_patch = patch_to_artifact_file + filename
- logger.info(f'Full file patch: {full_patch}')
+ logger.info(f"Full file patch: {full_patch}")
return full_patch
def rename_artifact_file(old_artifact_file_patch, new_artifact_file_patch):
os.rename(old_artifact_file_patch, new_artifact_file_patch)
return new_artifact_file_patch
-