1. Tools directory file structure

[root@www tools]# tree tools
tools
├── bin
│   └── gzip_history_files
└── etc
    └── gzip_history_files.cfg

2 directories, 2 files

2. The history-file compression script, gzip_history_files

# [root@www tools]# more tools/bin/gzip_history_files
#!/bin/sh
#
# gzip_history_files - compress history files in configured directories.
#
# Reads ../etc/gzip_history_files.cfg (one "<directory>=<hours>" entry per
# line) and gzips every regular, not-yet-compressed file in <directory>
# whose mtime is older than <hours> hours.  Time granularity is hours.

# Define a restricted PATH (the original assignment was garbled —
# lowercase name, stray spaces, comma instead of colon — so it had no effect).
PATH="/bin:/usr/bin:/sbin:/usr/sbin"
export PATH

# adirname - print the absolute dirname of the given file.
adirname() {
  odir=$(pwd)
  cd "$(dirname "$1")" || return 1
  pwd
  cd "$odir" || return 1
}

# die - print an error to stderr and abort.
# (The original script called die but never defined it.)
die() {
  printf '%s\n' "$*" >&2
  exit 1
}

# ---------
# constants
# ---------
MYNAM=$(basename "$0")
MYDIR=$(adirname "$0")
MYCFG="${MYDIR}/../etc/${MYNAM}.cfg"
MYTMP="${MYDIR}/../tmp"
MYLCK="${MYTMP}/${MYNAM}.lock"

# The lock directory may not exist on a fresh install; without it the
# original silently failed to write the lock file.
mkdir -p "$MYTMP"

# Perform some locking (as good as it gets in a shell).  A lock left by a
# dead process is ignored, because kill -0 fails for a PID that is gone.
if [ -s "$MYLCK" ] && kill -0 "$(cat "$MYLCK")" 2>/dev/null; then
  die "${MYNAM}: already running!"
fi
echo "$$" > "$MYLCK"
# Remove the lock on ANY exit path (the original used rm -rf and only on
# the happy path, leaving a stale lock behind if the script died mid-run).
trap 'rm -f "$MYLCK"' EXIT

# Read the config line by line instead of word-splitting it into a bash
# array: that keeps the script valid POSIX sh (the shebang says /bin/sh,
# but arrays are a bashism) and is safe for paths containing spaces.
if [ -r "$MYCFG" ]; then
  while IFS='=' read -r APP_PATH N; do
    # skip blank lines, comments and entries whose directory is missing
    case "$APP_PATH" in ''|\#*) continue ;; esac
    [ -d "$APP_PATH" ] || continue
    T=$(/bin/date --date "${N} hours ago" "+%Y%m%d%H%M")
    # Reference file carrying the cut-off timestamp; its name is the md5 of
    # the "<dir>=<hours>" entry (same derivation as the original) so
    # different entries never collide.
    TMP_FILE="/tmp/$(printf '%s=%s' "$APP_PATH" "$N" | md5sum | awk '{print $1}')"
    touch -t "$T" "$TMP_FILE"
    # -print0 / -0 keep filenames with spaces intact; -r stops xargs from
    # running gzip with no arguments when nothing matched.
    find "$APP_PATH" ! -newer "$TMP_FILE" -type f ! -name '*.gz' -print0 \
      | xargs -0 -r gzip > /dev/null 2>&1
    rm -f "$TMP_FILE"   # the original leaked one timestamp file per entry
  done < "$MYCFG"
else
  printf '%s: cannot read config %s\n' "$MYNAM" "$MYCFG" >&2
fi

3. Configuration file for the compression script, gzip_history_files.cfg

[root@www tools]# more tools/etc/gzip_history_files.cfg
# directory to compress = age threshold in hours
/home/logs/nginx=72
/home/logs/varnish=72

4. Configure scheduled tasks

[root@www tools]# more /etc/crontab
SHELL=/bin/bash
PATH=/sbin:/bin:/usr/sbin:/usr/bin
MAILTO=root
HOME=/
 
# gzip old logs
03 05 * * * root /home/tools/bin/gzip_history_files

With this schedule, every day at 5:03 a.m. the script compresses any file in the configured directories that is older than 72 hours (three days).