Jenkins pipeline - Upon error go to postFailure block - bash

Folks,
I have the below Jenkins pipeline. I want to run all the stages, hence I have catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') in place.
I have a script minion_notification.sh which produces a file called "jk_buildpass.txt". Now, I want my build to go to the postFailure block whenever the condition if [ ! -s jk_buildpass.txt ] is matched. With exit 1 the job is not failing and the subsequent stages still get executed. Any idea?
pipeline = {
    ansiColor('xterm') {
        FILEVERSION = params.FILEVERSION.trim()
        def remote = [:]
        remote.name = 'xxx'
        remote.host = '10.xxx.xx.x'
        remote.allowAnyHosts = true
        withCredentials([usernamePassword(credentialsId: 'saltmaster', passwordVariable: 'password', usernameVariable: 'ops')]) {
            remote.user = 'ops'
            remote.password = password
            stage (' Backup') {
                catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
                    sh './minion_notification.sh "${FILEVERSION}" "Validate" "backupdb"'
                    sh(returnStdout: true, script: '''#!/bin/bash
if [ ! -s jk_buildpass.txt ];then
exit 1
else
echo "jk_buildpass is not empty"
fi
'''.stripIndent())
                }
            }
            stage (' Uninstall') {
                catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
                    sh './minion_notification.sh "${FILEVERSION}" "Validate" "IRMUninstall"'
                    sh(returnStdout: true, script: '''#!/bin/bash
if [ ! -s jk_buildpass.txt ];then
exit 1
else
echo "jk_buildpass is not empty"
fi
'''.stripIndent())
                }
            }
            stage('Run DDMU') {
                sh './minion_notification.sh "${FILEVERSION}" "jenkins-display"'
            }
        }
        // sh './slk_notify.sh "Upgrade successfully completed for - ${FILEVERSION} " "*********** " "good" ":jenkins:"
    }
}
postFailure = {
    sh './slk_notify.sh " Upgrade failed for - ${FILEVERSION} " "danger" ":jenkinserror:"'
}
postAlways = {
    echo 'Cleaning Workspace now'
    env.WORKSPACE = pwd()
    //sh "rm ${env.WORKSPACE}/* -fr"
}

node {
    properties([
        parameters([
            string(name: 'FILEVERSION', defaultValue: '', description: 'Enter the file name to be processed')
        ])
    ])
    try {
        pipeline()
    } catch (e) {
        postFailure()
        throw e
    } finally {
        postAlways()
    }
}
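
One approach that should produce the behaviour described above (a sketch only, reusing the stage, script and file names from the question; not a verified fix): keep catchError around the steps whose failures should not stop the run, but do the jk_buildpass.txt check outside catchError and call error() there, so the exception propagates to the try/catch in the node block and postFailure runs.

// Sketch: check the file outside catchError so the failure is not swallowed
stage(' Backup') {
    catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
        sh './minion_notification.sh "${FILEVERSION}" "Validate" "backupdb"'
    }
    // Runs after catchError; a failure here reaches the try/catch in node {}
    if (sh(returnStatus: true, script: '[ -s jk_buildpass.txt ]') != 0) {
        error 'jk_buildpass.txt is missing or empty - aborting the build'
    }
}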

Related

When I am fetching data from a PowerShell script into Jenkins I am getting the below error

pipeline {
    agent none
    stages {
        stage('Powershell Data') {
            agent { label 'data' }
            steps {
                script {
                    OUTPUT = bat(returnStdout: true, script: '#powershell -File C:\\Users\\kreddy\\Powershell_Scripts\\PowerShell.ps1').replaceAll(~/\n/, '').replace('-','').trim();
                    echo
                }
            }
        }
    }
}
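
The actual error is not included above, but a common way to capture script output on a Windows agent (a sketch, assuming the Jenkins PowerShell plugin is available; the path is the one from the snippet above) is the powershell step with returnStdout:

// Sketch: run the script directly with the powershell step and echo its output
script {
    def OUTPUT = powershell(
        returnStdout: true,
        script: '& "C:\\Users\\kreddy\\Powershell_Scripts\\PowerShell.ps1"'
    ).trim()
    echo "PowerShell returned: ${OUTPUT}"
}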

Jenkins copy files error after docker npm build

I have a script similar to this:
pipeline {
    agent {
        docker {
            label 'dev'
            image 'node:12-alpine'
            args '-p 3000:3000'
        }
    }
    environment {
        HOME = '.'
    }
    stages {
        stage('clone repo') {
            steps {
                git(
                    url: '...',
                    credentialsId: '...',
                    branch: 'master'
                )
            }
        }
        stage('install dependency packages') {
            steps {
                sh 'npm install'
            }
        }
        stage('build prod ready enviroment') {
            steps {
                sh 'npm run build'
            }
        }
        stage('deploy') {
            agent { node { label 'dev' } }
            steps {
                sh "cp -rf ./build/* /opt/www_folder/"
            }
        }
    }
}
Now everything works fine except the deploy stage, which just hangs the build process. If I run only the last stage (deploy) separately, without the other stages, it works fine. I think there is a conflict with the docker agent but I don't know how to fix it.
I'm not sure if it is the best answer, but I managed to fix my problem with this script:
pipeline {
    agent none
    environment {
        HOME = '.'
    }
    stages {
        stage('clone repo') {
            agent { node { label 'dev' } }
            steps {
                git(
                    url: '...',
                    credentialsId: '...',
                    branch: 'master'
                )
            }
        }
        stage('install and build') {
            agent {
                docker {
                    label 'dev'
                    image 'node:12-alpine'
                    args '-p 3000:3000'
                }
            }
            steps {
                sh 'npm install'
                sh 'npm run build'
            }
        }
        stage('deploy') {
            agent { node { label 'dev' } }
            steps {
                sh "rm -rf /opt/www_folder/*"
                sh "cp -rf ./build/* /opt/www_folder/"
            }
        }
    }
}
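
A related option (a sketch, not part of the fix above) is to hand the build output between agents explicitly with stash/unstash, which avoids relying on the docker stage and the deploy stage sharing the same workspace:

// Sketch: stash the build output in the docker stage, unstash it on the deploy node
stage('install and build') {
    agent {
        docker {
            label 'dev'
            image 'node:12-alpine'
        }
    }
    steps {
        sh 'npm install && npm run build'
        stash name: 'build-output', includes: 'build/**'
    }
}
stage('deploy') {
    agent { node { label 'dev' } }
    steps {
        unstash 'build-output'
        sh 'rm -rf /opt/www_folder/* && cp -rf ./build/* /opt/www_folder/'
    }
}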

How to pass parameters to a remote script in a Jenkins pipeline

I am working on a declarative Jenkins pipeline where I am trying to log in to a remote host and execute a shell script. Below is a sample snippet. I want to know how to pass a parameter to my script.sh script.
#!/bin/bash
echo "argument $1"
Below is the pipeline script:
hosts = ["x.x.x", "x.x.x"]
pipeline {
    agent { node { label 'docker' } }
    parameters {
        choice(name: 'stageparam', choices: ['build', 'deploy'], description: 'xyz')
        string(name: 'Username', defaultValue: 'abc', description: 'enter username')
    }
    stages {
        stage('Setup') {
            steps {
                script {
                    pom = getPom(effective: false)
                }
            }
        }
        stage('Deploy') {
            steps {
                script {
                    def targetServers = null
                    if (stageparam == "deploy") {
                        targetServers = hosts
                    }
                    targetServers.each { server ->
                        echo "Server : ${server}"
                        def remote = [:]
                        remote.name = 'server'
                        remote.host = server
                        remote.user = Username
                        def pass = passwordParameter description: "Enter password for user ${remote.user} "
                        remote.password = pass
                        remote.allowAnyHosts = true
                        stage('Remote SSH') {
                            sshPut remote: remote, from: './script.sh', into: '.'
                            sshScript remote: remote, script: "doc.sh ${Username}"
                        }
                    }
                }
            }
        }
    }
}
I am getting the below error while executing the script:
/home/jenkins/workspace/script.sh Username does not exist.
I have written a simple pipeline script to show the usage of a variable in a bash script.
Pipeline script:
pipeline {
    agent any
    parameters {
        choice(name: 'stageparam', choices: ['build', 'deploy'], description: 'enter stageparam')
        string(name: 'USERNAME', defaultValue: 'abc', description: 'enter username')
    }
    stages {
        stage('Git Pull') {
            steps {
                checkout([$class: 'GitSCM', branches: [[name: '*/master']], doGenerateSubmoduleConfigurations: false, extensions: [], submoduleCfg: [], userRemoteConfigs: [[credentialsId: 'gitlab-test', url: 'http://localhost:8076/test-upgrade.git']]])
            }
        }
        stage('Run the python script') {
            steps {
                sh 'chmod 777 test.py'
                sh "python test.py ${env.WORKSPACE}"
            }
        }
        stage('Run the bash script') {
            steps {
                sh 'chmod 777 run.sh'
                sh "./run.sh ${env.USERNAME}"
            }
        }
    }
}
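
If the argument still needs to be passed over SSH with the SSH Steps plugin from the original snippet, note that sshScript expects a path to a local script file, which would explain the "script.sh Username does not exist" error. A sketch (an assumption, not part of the answer above) that copies the script with sshPut and runs it with an argument via sshCommand:

// Sketch only: 'remote' is the map built in the question's Deploy stage
sshPut remote: remote, from: './script.sh', into: '.'
sshCommand remote: remote, command: "bash ./script.sh ${params.Username}"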

Terraform remote state in Jenkins

I am working on a declarative Jenkins pipeline for Terraform deployments. I want to have the Terraform state stored remotely in my S3 bucket, but I get an error:
terraform apply -input=false ow.plan
Failed to load Terraform configuration or plan: open ow.plan: no such file or directory
Any suggestions?
Here's my code:
pipeline {
    agent any
    tools {
        "org.jenkinsci.plugins.terraform.TerraformInstallation" "terraform"
    }
    parameters {
        choice(name: 'action', choices: 'create\ndestroy', description: 'Create/update or destroy the eks cluster.')
        string(name: 'cluster', defaultValue : 'demo', description: "EKS cluster name;eg demo creates cluster named eks-demo.")
        string(name: 'vpc_network', defaultValue : '10.0', description: "First 2 octets of vpc network; eg 10.0")
        string(name: 'num_subnets', defaultValue : '3', description: "Number of vpc subnets/AZs.")
        string(name: 'instance_type', defaultValue : 'm4.large', description: "k8s worker node instance type.")
        string(name: 'num_workers', defaultValue : '3', description: "k8s number of worker instances.")
        string(name: 'api_ingress_cidrs', defaultValue : '0.0.0.0/0', description: "k8s api ingress cidrs; space delimited list.")
        string(name: 'jenkinsfile', defaultValue : '', description: "Jenkins credential that provides the AWS access key and secret.")
        string(name: 'region', defaultValue : 'eu-west-1', description: "AWS region.")
    }
    environment {
        TF_HOME = tool('terraform')
        TF_IN_AUTOMATION = "true"
        PATH = "$TF_HOME:$PATH"
        AWS_ACCESS_KEY_ID = credentials('AWS_ACCESS_KEY_ID')
        AWS_SECRET_ACCESS_KEY = credentials('AWS_SECRET_ACCESS_KEY')
        TF_VAR_access_key = credentials('AWS_ACCESS_KEY_ID')
        TF_VAR_secret_key = credentials('AWS_SECRET_ACCESS_KEY')
    }
    stages {
        stage('Setup') {
            steps {
                script {
                    currentBuild.displayName = "#" + env.BUILD_NUMBER + " " + params.action + " eks-" + params.cluster
                    plan = params.cluster + '.plan'
                }
            }
        }
        stage('TF Plan') {
            when {
                expression { params.action == 'create' }
            }
            steps {
                dir('infra/terraform/eks') {
                    script {
                        withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
                                          credentialsId: awsCredentialsId,
                                          accessKeyVariable: 'AWS_ACCESS_KEY_ID',
                                          secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
                            // Format cidrs into a list array
                            def ips = '["' + params.api_ingress_cidrs.replaceAll(/\s+/,'\",\"') + '"]'
                            sh """
                                terraform init
                                terraform workspace new ${params.cluster} || true
                                terraform workspace select ${params.cluster}
                                terraform plan \
                                    -var cluster-name=${params.cluster} \
                                    -var vpc-network=${params.vpc_network} \
                                    -var vpc-subnets=${params.num_subnets} \
                                    -var inst-type=${params.instance_type} \
                                    -var num-workers=${params.num_workers} \
                                    -var 'api-ingress-ips=${ips}' \
                                    -out ${plan}
                            """
                        }
                    }
                }
            }
        }
        stage('TF Apply') {
            when {
                expression { params.action == 'create' }
            }
            steps {
                script {
                    input "Create/update Terraform stack eks-${params.cluster} in aws?"
                    withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
                                      credentialsId: awsCredentialsId,
                                      accessKeyVariable: 'AWS_ACCESS_KEY_ID',
                                      secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
                        sh """
                            terraform apply -input=false ${plan}
                        """
                    }
                }
            }
        }
        stage('TF Destroy') {
            when {
                expression { params.action == 'destroy' }
            }
            steps {
                script {
                    input "Destroy Terraform stack eks-${params.cluster} in aws?"
                    withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
                                      credentialsId: awsCredentialsId,
                                      accessKeyVariable: 'AWS_ACCESS_KEY_ID',
                                      secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
                        sh """
                            terraform workspace select ${params.cluster}
                            terraform destroy -auto-approve
                        """
                    }
                }
            }
        }
    }
}
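
One detail worth checking in the pipeline above (an observation, not a confirmed fix): the plan file is written inside dir('infra/terraform/eks') during TF Plan, while TF Apply runs terraform apply from the workspace root, so ${plan} may simply not exist where apply looks for it. A sketch that keeps the apply in the same directory as the plan:

// Sketch: run apply from the same directory the plan was written to
stage('TF Apply') {
    when { expression { params.action == 'create' } }
    steps {
        dir('infra/terraform/eks') {
            script {
                input "Create/update Terraform stack eks-${params.cluster} in aws?"
                withCredentials([[$class: 'AmazonWebServicesCredentialsBinding',
                                  credentialsId: awsCredentialsId,
                                  accessKeyVariable: 'AWS_ACCESS_KEY_ID',
                                  secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
                    sh "terraform apply -input=false ${plan}"
                }
            }
        }
    }
}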

How to execute JavaScript in shell

JS:
#!/usr/bin/env node
const util = require("util"),
      inquirer = require("inquirer")

function promptUser() {
    inquirer.prompt([
        {
            type: "list",
            name: "scheme",
            message: "How would you like to build",
            choices: [
                {
                    name: "build develop",
                    value: "develop"
                }, {
                    name: "build product",
                    value: "product"
                }
            ]
        }
    ], earlyAnswers => {
        console.log(earlyAnswers.scheme);
    });
}
promptUser();
It will ask a question and I can select an answer, and the selected answer is logged.
I want to exec this JS from a shell script:
#!/bin/sh
echo 123
a=`node ./test2.js`
I want a to be set to the answer, but when I run it like this the prompt is never shown. How do I exec this JS?
Your JavaScript is actually running, but the backticks in the command
a=`node ./test2.js`
cause the shell to capture standard out and assign it to the variable a. All of stdout is captured, not just the last line, so nothing is presented to the user.
One possible way to solve this is to use the node.js process's exit code.
Here is a generic script that accepts the prompt and options on the command line, and sets its exit code to the zero-based index of the selection.
#! /usr/bin/env node --harmony_destructuring
util = require("util");
inquirer = require("inquirer");

var [program, file, textprompt, ...args] = process.argv
var choices = [];
args.forEach(function(E) {
    var [name, value] = E.split(":", 2);
    choices.push({name: name, value: value});
});

function promptUser() {
    inquirer.prompt([
        {
            type: "list",
            name: "scheme",
            message: textprompt,
            choices: choices
        }
    ]).then(function(answer) {
        if (answer && answer.scheme) {
            position = choices.findIndex(function(E) { return E.value == answer.scheme; });
            process.exit(position);
        } else {
            process.exit(-1);
        }
    })
}

if (choices.length > 0)
    promptUser();
Which could be used like this:
#! /bin/bash
./test2.js "How would you like to build?" "build develop:develop" "build product:product"
selected=$?
case $selected in
    0) echo "Selected develop";;
    1) echo "Selected product";;
    *) echo "Invalid selection";;
esac
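
One caveat with the exit-code approach (not mentioned above): shell exit statuses are a single byte, so process.exit(-1) is reported as 255 in $?, and only up to 256 distinct choices can be told apart this way; for longer option lists, writing the chosen value to a file and reading it back in the shell script would scale better.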
