Completed first version of pipeline for generating salt-model sphinx documentation
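
Rename test-salt-formula-docs-pipeline.groovy to generate-salt-model-docs-pipeline.groovy, drop the Gerrit refspec handling, the salt-formula installation and the formula review checkout stage, and rework the publish stage: package the documentation only when /srv/static/sites/reclass_doc and /srv/static/extern both exist, chown the output to the Jenkins user and keep all published HTML reports.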

Change-Id: Ia65ec675c96138c26a937ad3448d9d8404835c6c
diff --git a/test-salt-formula-docs-pipeline.groovy b/generate-salt-model-docs-pipeline.groovy
similarity index 64%
rename from test-salt-formula-docs-pipeline.groovy
rename to generate-salt-model-docs-pipeline.groovy
index e026257..4a36f0e 100644
--- a/test-salt-formula-docs-pipeline.groovy
+++ b/generate-salt-model-docs-pipeline.groovy
@@ -1,18 +1,11 @@
 /**
- * Pipeline for generating and testing sphinx generated documentation
+ * Pipeline for generating sphinx documentation for a reclass model
  * MODEL_GIT_URL
  * MODEL_GIT_REF
  * CLUSTER_NAME
  *
  */
 
-def gerritRef
-try {
-  gerritRef = GERRIT_REFSPEC
-} catch (MissingPropertyException e) {
-  gerritRef = null
-}
-
 common = new com.mirantis.mk.Common()
 ssh = new com.mirantis.mk.Ssh()
 gerrit = new com.mirantis.mk.Gerrit()
@@ -25,7 +18,7 @@
     try {
        def workspace = common.getWorkspace()
        def masterName = "cfg01." + CLUSTER_NAME.replace("-","_") + ".lab"
-       //def jenkinsUserIds = common.getJenkinsUserIds()
+       def jenkinsUserIds = common.getJenkinsUserIds()
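+       // Jenkins user and group ids, used below to chown the generated output so it can be archived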
        def img = docker.image("tcpcloud/salt-models-testing:nightly")
        img.pull()
        img.inside("-u root:root --hostname ${masterName} --ulimit nofile=4096:8192 --cpus=2") {
@@ -39,15 +32,13 @@
                 if (fileExists('classes/system')) {
                     ssh.prepareSshAgentKey(CREDENTIALS_ID)
                     dir('classes/system') {
-                      // XXX: JENKINS-33510 dir step not work properly inside containers
+                      // XXX: JENKINS-33510 dir step does not work properly inside containers, so let's take the reclass system model directly
                       //remoteUrl = git.getGitRemote()
                       ssh.ensureKnownHosts("https://github.com/Mirantis/reclass-system-salt-model")
                     }
                     ssh.agentSh("git submodule init; git submodule sync; git submodule update --recursive")
                 }
               }
-              // install all formulas
-              sh("apt-get update && apt-get install -y salt-formula-*")
               withEnv(["MASTER_HOSTNAME=${masterName}", "CLUSTER_NAME=${CLUSTER_NAME}", "MINION_ID=${masterName}"]){
                     sh("cp -r ${workspace}/* /srv/salt/reclass && echo '127.0.1.2  salt' >> /etc/hosts")
                     sh("""bash -c 'source /srv/salt/scripts/bootstrap.sh; cd /srv/salt/scripts \
@@ -59,14 +50,6 @@
                           saltmaster_init'""")
               }
            }
-           stage("Checkout formula review"){
-              if(gerritRef){
-                //TODO: checkout gerrit review and replace formula content in directory
-                // gerrit.gerritPatchsetCheckout([credentialsId: CREDENTIALS_ID])
-              }else{
-                common.successMsg("Test triggered manually, so skipping checkout formula review stage")
-              }
-           }
            stage("Generate documentation"){
                 def saltResult = sh(script:"salt-call state.sls salt.minion,sphinx.server,nginx", returnStatus:true)
                 if(saltResult > 0){
@@ -74,24 +57,30 @@
                 }
            }
            stage("Publish outputs"){
-                try{
-                  sh("mkdir ${workspace}/output")
-                  //TODO: verify existance of created output files
-                  // /srv/static/sites/reclass_doc will be used for publishHTML step
-                  sh("tar -zcf ${workspace}/output/docs-html.tar.gz /srv/static/sites/reclass_doc")
-                  sh("cp -R /srv/static/sites/reclass_doc ${workspace}")
-                  publishHTML (target: [
-                      reportDir: 'reclass_doc',
-                      reportFiles: 'index.html',
-                      reportName: "Reclass-documentation"
-                  ])
-                  // /srv/static/extern will be used as tar artifact
-                  sh("tar -zcf ${workspace}/output/docs-src.tar.gz /srv/static/extern")
-                  archiveArtifacts artifacts: "output/*"
-                }catch(Exception e){
+                try {
+                    // /srv/static/sites/reclass_doc will be used for publishHTML step
+                    // /srv/static/extern will be used as tar artifact
+                    def outputPresent = sh(script:"ls /srv/static/sites/reclass_doc > /dev/null 2>&1 && ls /srv/static/extern > /dev/null 2>&1", returnStatus: true) == 0
+                    if(outputPresent){
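+                      // container runs as root:root (see img.inside above), so chown the packaged output back to the Jenkins user for archiving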
+                      sh("""mkdir ${workspace}/output && \
+                            tar -zcf ${workspace}/output/docs-html.tar.gz /srv/static/sites/reclass_doc && \
+                            tar -zcf ${workspace}/output/docs-src.tar.gz /srv/static/extern && \
+                            cp -R /srv/static/sites/reclass_doc ${workspace}/output && \
+                            chown -R ${jenkinsUserIds[0]}:${jenkinsUserIds[1]} ${workspace}/output""")
+
+                      publishHTML (target: [
+                          alwaysLinkToLastBuild: true,
+                          keepAll: true,
+                          reportDir: 'output/reclass_doc',
+                          reportFiles: 'index.html',
+                          reportName: "Reclass-documentation"
+                      ])
+                      archiveArtifacts artifacts: "output/*"
+                    } else {
+                      common.errorMsg("Documentation publish failed, one of the output directories /srv/static/sites/reclass_doc or /srv/static/extern does not exist!")
+                    }
+                } catch(Exception e) {
                     common.errorMsg("Documentation publish stage failed!")
-                }finally{
-                   sh("rm -r ./output")
                 }
            }
        }