Merge "Make name of params consistent and do a little refactoring"
diff --git a/src/com/mirantis/mk/Openstack.groovy b/src/com/mirantis/mk/Openstack.groovy
index 7a45dbe..9fa683f 100644
--- a/src/com/mirantis/mk/Openstack.groovy
+++ b/src/com/mirantis/mk/Openstack.groovy
@@ -18,79 +18,62 @@
 /**
  * Install OpenStack service clients in isolated environment
  *
- * @param path Path where virtualenv is created
- * @param version Version of the OpenStack clients
+ * @param path        Path where virtualenv is created
+ * @param version     Version of the OpenStack clients
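+ *
+ * Usage example (a minimal sketch; the venv path is illustrative):
+ *
+ *     setupOpenstackVirtualenv('/home/jenkins/venv-openstack', 'kilo')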
  */
 
 def setupOpenstackVirtualenv(path, version = 'latest') {
-    setupOpenstackVirtualenv(['path': path, 'version': version])
-}
-
-/**
- * Install OpenStack service clients in isolated environment
- *
- * @param config : Config map with opts:
- *        path Path where virtualenv is created
- *        version Version of the OpenStack clients
- *        pyversion Version of the Python exec.
- *        requirements list of additional pip packages to be installed
- *
- */
-
-def setupOpenstackVirtualenv(LinkedHashMap config) {
     def python = new com.mirantis.mk.Python()
+    python.setupDocutilsVirtualenv(path)
 
-    def path = config.path
-    def version = config.get('version', 'latest')
-    def pyversion = config.get('pyversion', 'python2')
-    def requirements = config.get('requirements', [])
-    def os_requirements = []
+    def openstack_kilo_packages = [
+        //XXX: hack to fix https://bugs.launchpad.net/ubuntu/+source/python-pip/+bug/1635463
+        'cliff==2.8',
+        'python-cinderclient>=1.3.1,<1.4.0',
+        'python-glanceclient>=0.19.0,<0.20.0',
+        'python-heatclient>=0.6.0,<0.7.0',
+        'python-keystoneclient>=1.6.0,<1.7.0',
+        'python-neutronclient>=2.2.6,<2.3.0',
+        'python-novaclient>=2.19.0,<2.20.0',
+        'python-swiftclient>=2.5.0,<2.6.0',
+        'python-openstackclient>=1.7.0,<1.8.0',
+        'oslo.config>=2.2.0,<2.3.0',
+        'oslo.i18n>=2.3.0,<2.4.0',
+        'oslo.serialization>=1.8.0,<1.9.0',
+        'oslo.utils>=1.4.0,<1.5.0',
+        'docutils'
+    ]
 
+    def openstack_latest_packages = [
+        //XXX: hack to fix https://bugs.launchpad.net/ubuntu/+source/python-pip/+bug/1635463
+        'cliff==2.8',
+        // NOTE(vsaienko): cmd2 is a dependency of cliff; since we are not using upper-constraints
+        // we have to pin cmd2 < 0.9.0, as later versions are not compatible with python2.
+        // The same applies to the warlock package due to: https://github.com/bcwaldon/warlock/commit/4241a7a9fbccfce7eb3298c2abdf00ca2dede64a
+        // TODO(vsaienko): use upper-constraints here, as in requirements we only set the lowest
+        //                 library versions.
+        'cmd2<0.9.0;python_version=="2.7"',
+        'cmd2>=0.9.1;python_version=="3.4"',
+        'cmd2>=0.9.1;python_version=="3.5"',
+        'warlock<=1.3.1;python_version=="2.7"',
+        'warlock>1.3.1;python_version=="3.4"',
+        'warlock>1.3.1;python_version=="3.5"',
+        'python-openstackclient',
+        'python-octaviaclient',
+        'python-heatclient',
+        'docutils'
+    ]
 
-    if (version in ['kilo', 'liberty', 'mitaka']) {
-        os_requirements = [
-            //XXX: hack to fix https://bugs.launchpad.net/ubuntu/+source/python-pip/+bug/1635463
-            'cliff==2.8',
-            'python-cinderclient>=1.3.1,<1.4.0',
-            'python-glanceclient>=0.19.0,<0.20.0',
-            'python-heatclient>=0.6.0,<0.7.0',
-            'python-keystoneclient>=1.6.0,<1.7.0',
-            'python-neutronclient>=2.2.6,<2.3.0',
-            'python-novaclient>=2.19.0,<2.20.0',
-            'python-swiftclient>=2.5.0,<2.6.0',
-            'python-openstackclient>=1.7.0,<1.8.0',
-            'oslo.config>=2.2.0,<2.3.0',
-            'oslo.i18n>=2.3.0,<2.4.0',
-            'oslo.serialization>=1.8.0,<1.9.0',
-            'oslo.utils>=1.4.0,<1.5.0',
-            'docutils'
-        ]
+    def requirements
+    if (version in ['kilo', 'liberty', 'mitaka']) {
+        requirements = openstack_kilo_packages
     } else {
-        pyversion = 'python3'
-        os_requirements = [
-            //XXX: hack to fix https://bugs.launchpad.net/ubuntu/+source/python-pip/+bug/1635463
-            'cliff==2.8',
-            // NOTE(vsaienko): cmd2 is dependency for cliff, since we don't using upper-contstraints
-            // we have to pin cmd2 < 0.9.0 as later versions are not compatible with python2.
-            // the same for warlock package due: https://github.com/bcwaldon/warlock/commit/4241a7a9fbccfce7eb3298c2abdf00ca2dede64a
-            // TODO(vsaienko): use upper-constraints here, as in requirements we set only lowest library
-            //                 versions.
-            'cmd2<0.9.0;python_version=="2.7"',
-            'cmd2>=0.9.1;python_version=="3.4"',
-            'cmd2>=0.9.1;python_version=="3.5"',
-            'warlock<=1.3.1;python_version=="2.7"',
-            'warlock>1.3.1;python_version=="3.4"',
-            'warlock>1.3.1;python_version=="3.5"',
-            'python-openstackclient',
-            'python-octaviaclient',
-            'python-heatclient',
-            'docutils'
-        ]
+        requirements = openstack_latest_packages
     }
-    requirements = requirements + os_requirements
-    // (alexz): could we install docutils at single stage?
-    python.setupVirtualenv(path, pyversion, ['docutils'], null, true)
-    python.setupVirtualenv(path, pyversion, requirements.unique(), null, true)
+    python.setupVirtualenv(path, 'python2', requirements, null, true)
 }
 
 /**
diff --git a/src/com/mirantis/mk/Workflow.groovy b/src/com/mirantis/mk/Workflow.groovy
new file mode 100644
index 0000000..e798a19
--- /dev/null
+++ b/src/com/mirantis/mk/Workflow.groovy
@@ -0,0 +1,203 @@
+package com.mirantis.mk
+
+/**
+ *
+ * Run a simple workflow
+ *
+ * Function runScenario() executes a sequence of Jenkins jobs:
+ * - Parameters for the jobs are taken from the 'env' object
+ * - URLs of artifacts from completed jobs may be passed
+ *   as parameters to the next jobs.
+ *
+ * No constants, environment-specific logic or other conditional dependencies.
+ * All the logic should be placed in the workflow jobs, which perform the
+ * necessary actions depending on the job parameters.
+ * The runScenario() function only provides the execution order of the jobs
+ * and the passing of parameters and artifact URLs between them.
+ *
+ */
+
+
+/**
+ * Run a Jenkins job using the collected parameters
+ *
+ * @param job_name          Name of the job to run
+ * @param job_parameters    Map that declares which values from global_variables should be used, in the following format:
+ *                          {'PARAM_NAME': {'type': <job parameter $class name>, 'use_variable': <a key from global_variables>}, ...}
+ * @param global_variables  Map that keeps the artifact URLs and used 'env' objects:
+ *                          {'PARAM1_NAME': <param1 value>, 'PARAM2_NAME': 'http://.../artifacts/param2_value', ...}
+ * @param propagate         Boolean. If false: allows collecting artifacts after the job has finished, even with FAILURE status.
+ *                          If true: immediately fails the pipeline. DO NOT USE 'true' if you want to collect artifacts
+ *                          for the 'finally' steps.
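+ *
+ * Usage sketch (job name, parameter and variable names are illustrative and
+ * follow the runScenario() example below):
+ *
+ *     def global_variables = [:]
+ *     def job_parameters = [
+ *         'KAAS_VERSION': ['type': 'StringParameterValue', 'use_variable': 'KAAS_VERSION'],
+ *     ]
+ *     def job_info = runJob('deploy-kaas', job_parameters, global_variables)
+ *     println job_info.getResult()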
+ */
+def runJob(job_name, job_parameters, global_variables, Boolean propagate = false) {
+    def parameters = []
+
+    // Collect required parameters from 'global_variables' or 'env'
+    for (param in job_parameters) {
+        if (!global_variables[param.value.use_variable]) {
+            global_variables[param.value.use_variable] = env[param.value.use_variable] ?: ''
+        }
+        parameters.add([$class: "${param.value.type}", name: "${param.key}", value: global_variables[param.value.use_variable]])
+        println "${param.key}: <${param.value.type}> ${global_variables[param.value.use_variable]}"
+    }
+
+    // Build the job
+    def job_info = build job: "${job_name}", parameters: parameters, propagate: propagate
+    return job_info
+}
+
+/**
+ * Store URLs of the specified artifacts to the global_variables
+ *
+ * @param build_url         URL of the completed job
+ * @param step_artifacts    Map that contains artifact names in the job, and the variable names
+ *                          where the URLs to those artifacts should be stored, for example:
+ *                          {'ARTIFACT1': 'logs.tar.gz', 'ARTIFACT2': 'test_report.xml', ...}
+ * @param global_variables  Map that will keep the artifact URLs. The variable 'ARTIFACT1', for example,
+ *                          can then be used in the next job's parameters: {'ARTIFACT1_URL': {'use_variable': 'ARTIFACT1', ...}}
+ *
+ *                          If an artifact with the specified name is not found, the parameter ARTIFACT1_URL
+ *                          will be empty.
+ *
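+ * Usage sketch (the build URL and names are illustrative; names follow the
+ * runScenario() example below):
+ *
+ *     def global_variables = [:]
+ *     storeArtifacts('https://jenkins.example.com/job/deploy-kaas/42/',
+ *                    ['KUBECONFIG_ARTIFACT': 'artifacts/management_kubeconfig'],
+ *                    global_variables)
+ *     // global_variables['KUBECONFIG_ARTIFACT'] now holds the artifact URL, if found
+ *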
+ */
+def storeArtifacts(build_url, step_artifacts, global_variables) {
+    def http = new com.mirantis.mk.Http()
+    def base = [:]
+    base["url"] = build_url
+    def job_config = http.restGet(base, "/api/json/")
+    def job_artifacts = job_config['artifacts']
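+    // Each entry in 'job_artifacts' is a map from the Jenkins JSON API, e.g.:
+    //   [displayPath: 'logs.tar.gz', fileName: 'logs.tar.gz', relativePath: 'artifacts/logs.tar.gz']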
+    for (artifact in step_artifacts) {
+        def job_artifact = job_artifacts.findAll { item -> artifact.value == item['fileName'] || artifact.value == item['relativePath'] }
+        if (job_artifact.size() == 1) {
+            // Store artifact URL
+            def artifact_url = "${build_url}artifact/${job_artifact[0]['relativePath']}"
+            global_variables[artifact.key] = artifact_url
+            println "Artifact URL ${artifact_url} stored to ${artifact.key}"
+        } else if (job_artifact.size() > 1) {
+            // Error: too many artifacts with the same name, fail the job
+            error "Multiple artifacts ${artifact.value} for ${artifact.key} found in the build results ${build_url}, expected one:\n${job_artifact}"
+        } else {
+            // Warning: no artifact with expected name
+            println "Artifact ${artifact.value} for ${artifact.key} not found in the build results ${build_url}, found the following artifacts:\n${job_artifacts}"
+        }
+    }
+}
+
+
+/**
+ * Run the workflow or final steps one by one
+ *
+ * @param steps                   List of steps (Jenkins jobs) to execute
+ * @param global_variables        Map where the collected artifact URLs and 'env' objects are stored
+ * @param failed_jobs             Map with failed job names and result statuses, to report it later
+ * @param propagate               Boolean. If false: allows collecting artifacts after the job has finished, even with FAILURE status.
+ *                                If true: immediately fails the pipeline. DO NOT USE 'true' with runScenario().
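+ *
+ * A single 'steps' entry, in Groovy map form (a sketch; names follow the
+ * runScenario() example below):
+ *
+ *     [['job': 'deploy-kaas',
+ *       'ignore_failed': false,
+ *       'parameters': ['KAAS_VERSION': ['type': 'StringParameterValue', 'use_variable': 'KAAS_VERSION']],
+ *       'artifacts': ['KUBECONFIG_ARTIFACT': 'artifacts/management_kubeconfig']]]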
+ */
+def runSteps(steps, global_variables, failed_jobs, Boolean propagate = false) {
+    currentBuild.description = ''
+    for (step in steps) {
+        stage("Running job ${step['job']}") {
+
+            def job_name = step['job']
+            def job_parameters = step['parameters']
+            // Collect job parameters and run the job
+            def job_info = runJob(job_name, job_parameters, global_variables, propagate)
+            def job_result = job_info.getResult()
+            def build_url = job_info.getAbsoluteUrl()
+            def build_description = job_info.getDescription()
+
+            currentBuild.description += "<a href=${build_url}>${job_name}</a>: ${job_result}<br>"
+            // Import the remote build description into the current build
+            if (build_description) { // TODO: also add the job status
+                currentBuild.description += build_description
+            }
+
+            // Store links to the resulting artifacts into 'global_variables'
+            storeArtifacts(build_url, step['artifacts'], global_variables)
+
+            // Job failed, fail the build or keep going depending on 'ignore_failed' flag
+            if (job_result != "SUCCESS") {
+                def job_ignore_failed = step['ignore_failed'] ?: false
+                failed_jobs[build_url] = job_result
+                if (job_ignore_failed) {
+                    println "Job ${build_url} finished with result: ${job_result}"
+                } else {
+                    currentBuild.result = job_result
+                    error "Job ${build_url} finished with result: ${job_result}"
+                }
+            } // if (job_result == "SUCCESS")
+        } // stage ("Running job ${step['job']}")
+    } // for (step in scenario['workflow'])
+}
+
+/**
+ * Run the workflow scenario
+ *
+ * @param scenario          Map with the scenario steps
+ *
+ * There are two keys in the scenario:
+ *   workflow: contains steps to run deploy and test jobs
+ *   finally: contains steps to run report and cleanup jobs
+ *
+ * Scenario execution example:
+ *
+ *     scenario_yaml = """\
+ *     workflow:
+ *     - job: deploy-kaas
+ *       ignore_failed: false
+ *       parameters:
+ *         KAAS_VERSION:
+ *           type: StringParameterValue
+ *           use_variable: KAAS_VERSION
+ *       artifacts:
+ *         KUBECONFIG_ARTIFACT: artifacts/management_kubeconfig
+ *
+ *     - job: test-kaas-ui
+ *       ignore_failed: false
+ *       parameters:
+ *         KUBECONFIG_ARTIFACT_URL:
+ *           type: StringParameterValue
+ *           use_variable: KUBECONFIG_ARTIFACT
+ *       artifacts:
+ *         REPORT_SI_KAAS_UI: artifacts/test_kaas_ui_result.xml
+ *
+ *     finally:
+ *     - job: testrail-report
+ *       ignore_failed: true
+ *       parameters:
+ *         REPORT_SI_KAAS_UI_URL:
+ *           type: StringParameterValue
+ *           use_variable: REPORT_SI_KAAS_UI
+ *     """
+ *
+ *     // 'readYaml' is provided by the Jenkins Pipeline Utility Steps plugin
+ *     scenario = readYaml(text: scenario_yaml)
+ *     runScenario(scenario)
+ *
+ */
+
+def runScenario(scenario) {
+
+    // Collect the parameters for the jobs here
+    def global_variables = [:]
+    // List of failed jobs to show at the end
+    def failed_jobs = [:]
+
+    try {
+        // Run the 'workflow' jobs
+        runSteps(scenario['workflow'], global_variables, failed_jobs)
+
+    } catch (InterruptedException x) {
+        error "The job was aborted"
+
+    } catch (e) {
+        error("Build failed: " + e.toString())
+
+    } finally {
+        // Run the 'finally' jobs
+        runSteps(scenario['finally'], global_variables, failed_jobs)
+
+        if (failed_jobs) {
+            println "Failed jobs: ${failed_jobs}"
+            currentBuild.result = "FAILED"
+        }
+    } // try
+}