Backing up the Jenkins container
# Export the Jenkins container's filesystem to a tar archive.
# NOTE(review): `docker export` captures the container filesystem only — the
# Jenkins home is mounted from /data/jenkins (see the run command below) and
# must be backed up separately.
docker export jenkins > jenkins.tar
Installing Jenkins with Docker
# Run Jenkins (Blue Ocean image) as a detached container.
#   -p 50000:50000                              agent (JNLP) port
#   -p 7070:8080                                web UI exposed on host port 7070
#   -v /data/jenkins:/var/jenkins_home          persist Jenkins home on the host
#   -v /var/run/docker.sock + /usr/bin/docker   let pipeline jobs drive the host Docker daemon
#   -v /root/.ssh:/root/.ssh                    reuse the host's SSH keys for git checkout/deploy
#   -u root                                     run as root so the mounted docker socket is usable
#   --restart always                            restart together with the Docker daemon
docker run -d --name jenkins \
-p 50000:50000 \
-p 7070:8080 \
-v /data/jenkins:/var/jenkins_home \
-v /var/run/docker.sock:/var/run/docker.sock \
-v /usr/bin/docker:/usr/bin/docker \
-v /root/.ssh:/root/.ssh \
-u root \
--restart always \
jenkinsci/blueocean
Copy the code
Login accounts (URL / username / password)
http://xxx.xxx.xxx:7070/
admin
admin68Zt7zcR37ZbVChU
dev
dev
Copy the code
pipeline
#!groovy
// Jenkins parameterized build: the target environment comes from the
// build parameter `env_name` (configured in the job's parameter options).
Env_name = "${env_name}"
// Tag suffix applied to every image built by this pipeline.
Tag_Preview_by_ops = ":preview_by_ops"
// NOTE(review): trailing space kept on purpose — later string concatenations
// ('cp ...' and 'docker build -f ...') rely on it as a separator.
Dockerfile = '/Dockerfile '
Src = 'src/'
GroupURL = 'http://xxx.xxx.xxx:8082/new_mall/'
Group_SSH_URL = '[email protected]:new_mall/'
GitLabCredentialsId = 'lingjie_gitlab'
DockerHubRegistry = 'http://localhost:5000/'
node {
timestamps {
    // Shared library repositories (checked out, but no image is built for them).
    def BaseRepository = [
        'pb', 'pkg', 'xq'
    ] as List
    // Service repositories: each one gets its own Docker image and deploy.
    def ServerArray = [
        'backend_http_gate', 'account', 'product', 'coupon', 'order', 'http_proxy',
        'mall', 'money', 'notice', 'pay', 'after_service', 'recharge', 'statistics',
    ] as List
    stage('prepare data dir') {
        // From here on, BaseRepository holds every repo to check out (libs + services).
        BaseRepository.addAll(ServerArray)
        script {
            // https://blog.k4nz.com/63f9f5c157227d3c2f8a8cca18b3356c/
            // https://www.debugcn.com/article/22628704.html
            // TODO: restore the build-cache copy once the host path is stable
            // sh 'docker cp /home/build_cache/ jenkins:/var/jenkins_home/workspace/pre_build/build'
        }
        script {
            build_file = "${env.WORKSPACE}/build"
            // build_file = "build"
            if (!fileExists(build_file)) {
                error("Build dir does not exist")
            }
        }
    }
    stage('parreller checkout source code') {
        // Pick the branch to build from the target environment.
        def branche_name = ' '
        switch (Env_name) {
            case 'prebuild':
                branche_name = '1.0.0'
                break
            // case 'prod':
            //     // TODO: prod branch update
            //     error('no over')
            //     break
        }
        dir(Src) {
            // One parallel checkout closure per repository.
            def Map = BaseRepository.collectEntries {
                // def checkout_ext = [[$class: 'CleanCheckout'], [$class: 'CleanBeforeCheckout']] // calls git clean -fdx and git reset --hard
                [
                    "${it}": {
                        checkout([
                            $class: 'GitSCM',
                            branches: [[name: "origin/" + branche_name]],
                            doGenerateSubmoduleConfigurations: false,
                            extensions: get_extensions(it),
                            submoduleCfg: [],
                            userRemoteConfigs: [[
                                // credentialsId: GitLabCredentialsId,
                                // url: 'http://xxx.xxx.xxx:8082/new_mall/' + it + '.git'
                                // TODO: switch back to HTTP checkout with credentials
                                url: 'ssh://[email protected]:8082/new_mall/' + it + '.git'
                            ]]
                        ])
                    }
                ]
            }
            parallel Map
        }
    }
    stage('checkout repo dockerfile') {
        dir('repo_dockerfile') {
            checkout([
                $class: 'GitSCM',
                branches: [[name: '*/master']],
                userRemoteConfigs: [
                    // Gitlab
                    [url: 'ssh://[email protected]:11180/ops/dockerfiles.git']
                ],
            ])
        }
        // Copy each service's Dockerfile from the ops repo into its source tree.
        parallel ServerArray.collectEntries {
            [
                "${it}": {
                    sh('cp repo_dockerfile/' + "${it}" + Dockerfile + ' ' + Src + "${it}" + Dockerfile)
                }
            ]
        }
    }
    stage("Configuration file environment Replacement") {
        // Check out the env-specific configs and overlay them onto each service's source.
        def _tar = '.tar'
        dir('env_config') {
            checkout([
                $class: 'GitSCM',
                branches: [[name: '*/' + Env_name]],
                userRemoteConfigs: [
                    [url: 'ssh://[email protected]:10022/ops/configs.git']
                ],
            ])
            parallel ServerArray.collectEntries {
                [
                    "${it}": {
                        dir("${it}/app") {
                            // Pack the config dir, then unpack it over the service source.
                            // NOTE(review): the original tar arguments were garbled
                            // ('/ *'); archiving the current directory — confirm intent.
                            sh 'tar -cvf ' + "${it}" + _tar + ' .'
                            sh 'tar -xvf ' + "${it}" + _tar + ' -C ' + "${env.WORKSPACE}/" + 'src/' + "${it}/"
                        }
                    }
                ]
            }
        }
    }
    stage('build image') {
        // Do not use parallel docker builds here (too heavy for the build node).
        // parallel ServerArray.collectEntries { ["${it}" : gen_docker_build_stage(it)] }
        try {
            ServerArray.each({ item ->
                docker.withRegistry( DockerHubRegistry ) {
                    def image = docker.build(
                        DockerHubRegistry + 'mall-' + item + Tag_Preview_by_ops,
                        '-f src/' + item + Dockerfile + ' .'
                    )
                    image.push()
                }
            })
        } catch (Exception err) {
            // Clean up dangling images, then propagate the failure.
            // (The original had image_prune() after `throw err`, i.e. unreachable.)
            image_prune()
            throw err
        }
    }
    stage('by ' + Env_name + 'env compose deploy up') {
        // Notify via DingTalk, then trigger the compose deploy on the target host.
        dingtalk(
            robot: 'dingding_robot',
            type: 'LINK', // TEXT
            atAll: false,
            title: "deplody",
            messageUrl: "${BUILD_URL}",
            text: ["deploy " + Env_name + " ${BUILD_URL}\n" + "docker compose up"]
        )
        // TODO: environment judgment before deploying
        sh(
            // "ssh [email protected] 'pushd /home/data/project/ops_build_mall; docker-compose pull ; docker-compose up -d'"
            "ssh [email protected] 'pushd /home/data/project/ops_build_mall; docker-compose config; '"
        )
    }
    stage('clean') {
        deleteDir()
        cleanWs()
        dir('src') {
            image_prune()
        }
    }
}
} // node / timestamps
// Remove dangling Docker images to free disk space on the build node.
def image_prune() {
    sh('docker image prune -f')
}
// Checkout extensions shared by every repository checkout.
// item: repository name, used as the checkout target directory.
def get_extensions(item) {
    return [
        // Check the repo out into a directory named after the repo.
        [
            $class: 'RelativeTargetDirectory',
            relativeTargetDir: item
        ],
        // git shallow clone: depth 1, 60-minute timeout.
        [
            $class: 'CloneOption',
            depth: 1,
            reference: ' ',
            shallow: true,
            timeout: 60
        ]
    ]
}
// TODO: do not use parallel builds — they make the node very laggy
// Returns a closure that builds and pushes the Docker image for one service.
// job: service/repository name; the image is tagged mall-<job> plus the ops suffix.
def gen_docker_build_stage(job) {
    return {
        stage("${job}") {
            // Full tag and docker-build arguments for this service.
            def imageTag = DockerHubRegistry + 'mall-' + "${job}" + Tag_Preview_by_ops
            def buildArgs = "-f src/" + "${job}" + '/Dockerfile ' + ' .'
            docker.withRegistry( DockerHubRegistry ) {
                docker.build(imageTag, buildArgs).push()
            }
        }
    }
}
Copy the code
Integrating with the GitLab API
stage('http') {
    // Query the GitLab API for group 8 and collect its project names.
    def response = httpRequest(
        contentType: 'APPLICATION_JSON',
        httpMode: 'GET',
        // NOTE(review): the private token is hard-coded here — move it into
        // Jenkins credentials instead of committing it to the pipeline.
        customHeaders: [[name: 'PRIVATE-TOKEN', value: 'b-RY6yysoxksnssXNV3Q']],
        url: 'http://xxx.xxx.xxx:8082/api/v4/groups/8'
    )
    def props = readJSON text: response.content
    def projects = props.projects
    for (project in projects) {
        // Append each project name to the shared array.
        ProjectNameArray << project.name
    }
    // def data = props['projects']
    // data.find {
    //     if (it.startsWith("b")) {
    //         return true
    //     }
    //     return false
    // }
    // println data
}
Copy the code
Recommended plugins
thinBackup
SSH Pipeline Steps
Pipeline Utility Steps
Copy the code