We should back up our data regularly

The target

The technical requirements

  1. Basic JavaScript / Node.js knowledge
  2. Basic shell scripting skills
  3. Tools used
    • shelljs: write shell scripts in JavaScript
    • qiniu: upload files to Qiniu cloud storage
    • node-schedule: schedule recurring jobs in Node.js

Train of thought

  1. Export the database with mongodump
  2. Package and compress the export with a shell script (a plain shell script would also work; this article uses shelljs as the example)
  3. Upload the archive to cloud storage with Node.js (this article uses Qiniu cloud as the example)

Upload files to qiniu Cloud storage

const qiniu = require('qiniu')

// Qiniu cloud storage credentials.
// NOTE(review): credentials should be loaded from environment variables or a
// secrets store rather than hard-coded in source (keys shown here are masked).
const config = {
  qiniu: {
    bucket: 'db-dump-server',
    accessKey: 'ssK9nIwjU**********2UOoQcW5eWOosHh7yX09',
    secretKey: 'EQ0IRb**************4QbsJO5sbZcizuM'
  }
}

// Qiniu account / bucket configuration
const bucket = config.qiniu.bucket
const accessKey = config.qiniu.accessKey
const secretKey = config.qiniu.secretKey
const mac = new qiniu.auth.digest.Mac(accessKey, secretKey)
const cfg = new qiniu.conf.Config()
// const bucketManager = new qiniu.rs.BucketManager(mac, cfg)

// Upload policy scoped to the target bucket; the token authorizes uploads.
const options = {
  scope: bucket
}
const putPolicy = new qiniu.rs.PutPolicy(options)
const uploadToken = putPolicy.uploadToken(mac)

// Zone corresponding to the bucket's data center (uncomment and adjust if needed)
// cfg.zone = qiniu.zone.Zone_z2
const formUploader = new qiniu.form_up.FormUploader(cfg)
const putExtra = new qiniu.form_up.PutExtra()

/**
 * Upload a local file to Qiniu cloud storage.
 * Adapts the SDK's callback-style `putFile` into a Promise.
 *
 * @param {string} localFile - path of the local file to upload
 * @param {string} key - object key to store the file under in the bucket
 * @returns {Promise<object>} resolves with the upload response body on HTTP 200,
 *   rejects with `{ msg, respErr }` or `{ msg, respInfo }` on failure
 */
const uploadToQiniu = (localFile, key) => {
  const msg = 'Local resources, error uploading seven cows'
  // File upload via the module-level uploader/token configured above
  return new Promise((resolve, reject) => {
    formUploader.putFile(
      uploadToken,
      key,
      localFile,
      putExtra,
      (respErr, respBody, respInfo) => {
        if (respErr) {
          reject({ msg, respErr })
        } else if (respInfo.statusCode === 200) {
          resolve(respBody)
        } else {
          // Upload reached the server but was not accepted
          reject({ msg, respInfo })
        }
      }
    )
  })
}

// Smoke test — uncomment to try a one-off upload:
// !(async () => {
//   console.log('start')
//   const res = await uploadToQiniu(path.resolve(__dirname, './README.md'))
//   console.log('end', res)
// })()

module.exports = uploadToQiniu
Copy the code

Shelljs script writing

const path = require('path')
const shell = require('shelljs')
// shell.echo('mongodump start')

/**
 * Promisified wrapper around shelljs's async `exec`.
 *
 * @param {string} cmd - shell command to run
 * @returns {Promise<string>} resolves with the command's output; never rejects
 *   (NOTE(review): the exit code is ignored — consider rejecting on non-zero)
 */
const exec = cmd => {
  return new Promise(resolve => {
    shell.exec(cmd, { async: true }, data => {
      resolve(data)
    })
  })
}

// Directory where mongodump writes its output (relative to the working directory)
const target = './dump'
const targetPath = path.join(target)

/**
 * Dump a MongoDB database and compress the dump into a tarball.
 *
 * @param {string} db - database instance name
 * @returns {Promise<{newFile: string, key: string}>} path of the created
 *   tarball and its timestamped name (used later as the cloud-storage key)
 */
const mongodump = async db => {
  await exec(`mongodump -h localhost:27017 -d ${db} -o ${targetPath}`)
  // Timestamped archive name doubles as the upload key, so backups never collide
  const key = `${db}-dump.${new Date().getTime()}.tar.gz`
  const newFile = path.join(target, db, key)
  const sourceFilePath = path.join(target, db)
  await exec(`tar -zcvf ${newFile} ${sourceFilePath}`)
  return {
    newFile,
    key
  }
}
// mongodump()

// Delete the compressed file
const removeFile = async (path = targetPath) => {
  await exec(`rm -rf ${path}} `)}module.exports = {
  mongodump,
  removeFile
}
Copy the code

Running the backup periodically with Node.js

const schedule = require('node-schedule') const upToQiniu = require('./upToQiniu') const { mongodump, removeFile } = require('./shell/') const run = async () => { console.log('start upload and remove.... ') const st = new Date().gettime () // mongodb-data const {newFile, Key} = await mongodump('card') // upload await upToQiniu(newFile, // await removeFile() const et = new Date().getTime() console.log(' console.log ') ${1000} (et - st)/s `)} the run () / / / / every Monday at 3 a.m. to perform a backup / / schedule scheduleJob (' 30 3 * * * / 1, async () = > {/ / the run () / /})Copy the code

The resources

  • Database Export