Shell scripts commonly used at work (not general-purpose, but usable for reference)

1. Jump-host script (menu-driven, automatically runs the follow-up command)

Note: the IPs below are internal company IPs; the last octet and the API endpoints have been masked.

#!/bin/bash
echo '========================================================================================'
echo 'Note: for IPs other than the 4 below (enter 000 to list other IPs), the IP must be entered twice; the first login to an IP prompts yes/no, just choose yes'
#     cc.txt can hold frequently used login IPs; you do not need to add them all, and IPs can also be added directly to this script;
#     bb.txt holds frequently used paths after login; add paths here if you tend to forget them;
echo '========================================================================================'
echo 'Failed archive: 10.252.174.1'
echo 'Archive serial resend: 10.252.174.2'
echo 'Re-send contract archive message: 10.252.175.3'
echo 'Simulated account-manager login: 10.252.174.4'
#echo '======================================================================================='
#helper commands stored in variables and executed later via $VAR; endpoints and payloads are masked placeholders
HEADSCRIP='cd /rdata1/work_tools/headScripcd'
TRANSIDO='curl -H "Content-Type: application/json" -X GET http://<api-endpoint>=<serial-no>'
RECIVEPROV='curl -H "Content-Type: application/json" -X POST --data {"prov":<message>} <api-endpoint>'
STAFFMONI='<simulate-account-manager-login-command>'
USER=app
USERA=`whoami`
echo '========================================================================================'
echo `cat ./bb.txt|awk 'NR==1'`
echo `cat ./bb.txt|awk 'NR==2'`
echo `cat ./bb.txt|awk 'NR==3'`
echo `cat ./bb.txt|awk 'NR==4'`
echo '========================================================================================'
read -p "Please enter the IP address to log in to:" commond

if [ -n "$commond" ]
  then echo 'Enter password: Paas'
fi

if [ "$commond" = "10.252.174.1" ]
  then
    echo "1: failed archive / 2: archive serial resend" &&
      read -p "Please enter 1 or 2:" nummer

  if [ "$nummer" = "1" ]
     then ssh $USER@$commond && $HEADSCRIP
#         if [ $? -eq 0 ]
#            then  $HEADSCRIP
#         fi
       else $TRANSIDO && ssh $USER@$commond
  fi
elif [ "$commond" = "10.252.175.2" ]
  then ssh $USER@$commond && $RECIVEPROV
elif [ "$commond" = "10.252.174.3" ]
  then $STAFFMONI && ssh $USER@$commond
elif [ "$commond" = "000" ]
  then cat ./cc.txt
fi

if [ "$commond" != "10.252.174.1" ] &&
   [ "$commond" != "10.252.175.2" ] &&
   [ "$commond" != "10.252.174.3" ]
     then read -p "Please enter the IP address to log in to:" commond && ssh $USER@$commond
fi

if [ "$USERA" = app ]
  then top
fi
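
For reference, a hypothetical layout of the two helper files the script reads (the actual contents are site-specific): bb.txt holds the frequently used paths echoed under the banner (lines 1-4), and cc.txt holds the extra login IPs printed when 000 is entered.

# bb.txt -- one frequently used path per line (hypothetical examples taken from paths used elsewhere in this post)
/rdata1/work_tools/headScripcd
/rdata1/absfile/abs-file-cust/bak
/home/appop/app/udm/bi/report/20045

# cc.txt -- additional login IPs shown when 000 is entered (hypothetical, last octet masked)
failed-archive standby     10.252.174.**
archive-resend standby     10.252.174.**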

2. Daily/monthly reconciliation check script

#!/bin/bash
################
#name:张晓伟   #
#date:20220301 #
################

MONTH=`date +%Y%m`  # current month
MONTHDAY=`date +%Y%m%d` # today
DATEONE=`date -d "1 day ago" +"%Y%m%d"`  # yesterday
echo '=========================================='
# difference files:
cd /rdata1/absfile/abs-file-cust/bak/$MONTH
echo 'Difference files generated today:'
ECNUMA=`ls -rt *EC*$MONTHDAY*|wc -l`
PONUMA=`ls -rt *PO*$MONTHDAY*|wc -l`
MONUMA=`ls -rt *MO*$MONTHDAY*|wc -l`
echo "EC--expected >= 31, actual:  $MONTHDAY: $ECNUMA"
echo "PO--expected == 31, actual:  $MONTHDAY: $PONUMA"
echo "MO--expected == 31, actual:  $MONTHDAY: $MONUMA"
echo '=========================================='
################################################################
# daily/monthly reconciliation:
cd /rdata1/abs-file-check/source/bak/$MONTH
echo 'MO/PO reconciliation files generated today:'
MONUMB=`ls -l *MemberInfo*$MONTHDAY*|awk -F '[_.]' '{print $3}'|uniq -c |sort -rn|awk -F '[ ]+' '{print $3}'|wc -l`
PONUMB=`ls -l *ProductOrder*$MONTHDAY*|awk -F '[_.]' '{print $3}'|uniq -c |sort -rn|awk -F '[ ]+' '{print $3}'|wc -l`

echo "MemberInfo--expected count 32, actual:  $MONTHDAY: $MONUMB"
#if [ $MONUMB -lt 32 ]
#  then echo
echo "ProductOrder--expected count 32, actual:  $MONTHDAY: $PONUMB"
echo '=========================================='
#################################################################
# monitoring file
echo 'Daily/monthly reconciliation monitoring file generation result:'
DYDIR=/rdata1/abs-file-cust/bak/$MONTH
EARDIR=JSJKYJ_D_002_01_$DATEONE.txt
cd $DYDIR
if [ -f JSJKYJ_D_002_01_$MONTHDAY.txt ]
  then #echo 'the daily/monthly reconciliation monitoring file was generated normally today'
       ls JSJKYJ_D_002_01_$MONTHDAY.txt
    else cp $EARDIR JSJKYJ_D_002_01_$MONTHDAY.txt
fi
echo '=========================================='
#################################################################
# business-analysis (jingfen) feedback files
echo 'Number of business-analysis files generated:'
ls -l /home/appop/app/udm/bi/report/20045/*$DATEONE*_001.verf|wc -l
if [ `ls -l /home/appop/app/udm/bi/report/20045/*$DATEONE*_001.verf|wc -l` -le 6 ]
  then sh /home/appop/app/udm/alltask.sh
    else
  echo 'zxw.sh has already been executed'
fi
echo '=========================================='

3. Generate fixed files on a monthly schedule and check that the file count is correct

# cat riduizhang.sh
#!/bin/bash
#################################################
#zhangxiaowei
#20220620
# generate the monthly reconciliation files at 00:00 on the 17th of each month, and check whether the file count is correct
#################################################

MONTH=`date +%F|cut -c1-4,6-7`   # current month
MONTHONE=`date -d "1 month ago" +%Y%m`  # previous month
PROVINCE=`cat /scripts/province.txt` # province list, entries like 100_1.txt
IFS=$'\n'
DIR=/rdata1/absfile/abs-file-cust/bak/$MONTH
cd $DIR
# create the files
for PROV in $PROVINCE
  do
    touch BBOSS_Y_EC_${MONTHONE}_$PROV
    touch BBOSS_Y_PO_${MONTHONE}_$PROV
    touch BBOSS_Y_MO_${MONTHONE}_$PROV
    chmod 777 $DIR/*${MONTHONE}_$PROV
done
# count the generated files
NUMONE=`ls -l $DIR/BBOSS_Y_*_${MONTHONE}_*_1.txt|wc -l`
# check whether the file count is 93
if ! ls $DIR/*Y*${MONTHONE}_$PROV >/dev/null 2>&1
  then echo 'files do not exist'
    else
      if [ $NUMONE -eq  93 ]
        then echo 'files exist and the count equals 93'
          else echo 'files exist but the count is not 93'
      fi
fi
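
The header comment says this job runs at 00:00 on the 17th of each month; a crontab entry along those lines might look like the following (the /scripts/riduizhang.sh path is assumed from the province.txt location below and is illustrative only).

# hypothetical crontab entry: generate and check the monthly files at 00:00 on the 17th
0 0 17 * * sh /scripts/riduizhang.sh >> /tmp/riduizhang.log 2>&1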

#===========================
[appop@VM-16-13-centos scripts]# cat province.txt
100_1.txt
200_1.txt
210_1.txt
220_1.txt
230_1.txt
240_1.txt
250_1.txt
270_1.txt
280_1.txt
290_1.txt
311_1.txt
351_1.txt
371_1.txt
431_1.txt
451_1.txt
471_1.txt
531_1.txt
551_1.txt
571_1.txt
591_1.txt
731_1.txt
771_1.txt
791_1.txt
851_1.txt
871_1.txt
891_1.txt
898_1.txt
931_1.txt
951_1.txt
971_1.txt
991_1.txt

4. Platform feedback-file audit script

#!/bin/bash

#NO.1
MONTH=`date +%Y%m`  # current month
MONTHDAY=`date +%Y%m%d` # today
DATEONE=`date -d "1 day ago" +"%Y%m%d"`  # yesterday
NEWDIR=/home/app/app/udm/bi/report/20045/

#NO.2
#change to the feedback directory
cd $NEWDIR
#count the feedback files
NUMONE=`ls -l *r_*$DATEONE*|wc -l`
if [ $NUMONE -ne 9 ]
  then echo "feedback file count is not 9, actual: $NUMONE"
fi

echo $? > /dev/null 2>&1
if [ $? -eq 0 ]
  then cd ../ && cd ./20045/
fi

#check whether the returned files are normal
NUMTWO=`cat *r_*$DATEONE*|awk '{print $2}'|cut -c 7-8|sort -rn|uniq -c|sort|awk '{print $NF}'|wc -l`

if [ $NUMTWO -ne 1 ]
  then echo $NUMTWO #&& cat *r_*$DATEONE*
    else echo '1'
fi

5. Per-instance memory-usage monitoring for the EAI microservice

Single-run output
$ cat max.sh
#!/bin/bash
LOGDIR=/eailogs/eai-gateway-server
DATE=$(date "+%Y-%m-%d")
cd $LOGDIR
FILEN=`ls bboss-new-eai-*$DATE.log|xargs -n1`

for i in $FILEN
  do
   USE=`grep  directmemory $i |grep -o "use.*" |awk -F[:,] '{print $2}' |tail -1`
   MAX=`grep  directmemory $i |grep -o "max.*" |awk -F ':' '{print $2}' |tail -1`
   A=`awk 'BEGIN{printf "%2f\n",'$USE'/'$MAX'*'100'}'`
   #B=`grep  directmemory $i|grep -o "use.*"|tail -1`
   #AA=`awk 'BEGIN{srand();print int(\$A*100)}'`
echo $i-----$A"%"
done

bboss-new-eai-7cbc45976d-27dsf-2022-12-29.log-----11.425433%
bboss-new-eai-7cbc45976d-fsq7v-2022-12-29.log-----14.940950%
bboss-new-eai-7cbc45976d-j7n6m-2022-12-29.log-----52.293326%
bboss-new-eai-7cbc45976d-mcg7s-2022-12-29.log-----54.051085%
bboss-new-eai-7cbc45976d-vt8wl-2022-12-29.log-----53.172206%
bboss-new-eai-7cbc45976d-wf7tn-2022-12-29.log-----13.183192%

Scheduled-task output
Memory leak monitoring:
Script path (host 160.72): /home/appop/script/max.sh
Cron job:
*/10 * * * * sh /home/appop/script/max.sh >> /tmp/eaimax.txt
Check a specific time:
cat /tmp/eaimax.txt |grep  -B 1 -C 6 "2022-12-30 10:30"
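
As a rough extension, the collected output can be scanned for instances above a usage threshold. A minimal sketch, assuming the /tmp/eaimax.txt format produced by max.sh above and a hypothetical 80% threshold:

#!/bin/bash
# hypothetical alert wrapper: report EAI instances whose direct-memory usage exceeds 80%
THRESHOLD=80
awk -F '-----' -v limit=$THRESHOLD '
  {
    pct = $2; sub(/%$/, "", pct)          # strip the trailing "%" from the usage column
    if (pct + 0 > limit) print $1, "usage", $2, "exceeds", limit"%"
  }' /tmp/eaimax.txt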

6. Database replication (backflow) alert

#!/bin/bash
hostONE=10.252.175.**
hostTWO=10.252.175.**
monitor='***'
user=moniter
database=abs_bomc
ONE=`greatdb -u$user -h$hostONE -p$monitor -P16310 $database -sN -e "show master status\G"|grep -o "seconds behind master:"`
TWO=`greatdb -u$user -h$hostTWO -p$monitor -P16310 $database -sN -e "show master status\G"|grep -o "seconds behind master:"`
[ "$ONE" = "$TWO" ] && echo 1 || echo 0
#1 = backflow (replication) normal
#0 = backflow (replication) abnormal

7. Purge zabbix history data older than 90 days

#!/bin/bash
source /rdata1/encrypt/decrypt.sh
############################
#purge zabbix history data older than 90 days
#张晓伟
#20230309
############################
Date=`date -d $(date -d "-90 day" +%Y%m%d) +%s` # timestamp of 90 days ago
mysql -uapp -p$db_pwd_mysql -h10.252.174.** -e "
use app;
DELETE FROM history WHERE clock < $Date;
optimize table history;
DELETE FROM history_str WHERE clock < $Date;
optimize table history_str;
DELETE FROM history_uint WHERE clock < $Date;
optimize table history_uint;
DELETE FROM  trends WHERE clock < $Date;
optimize table  trends;
DELETE FROM trends_uint WHERE clock < $Date;
optimize table trends_uint;
DELETE FROM events WHERE clock < $Date;
optimize table events;
DELETE FROM history_text WHERE clock < $Date;
optimize table history_text;
DELETE FROM history_log WHERE clock < $Date;
optimize table history_log;
"

8. SFTP/FTP file download script

sftp
=====
#!/bin/bash
HOST='sftp.example.com'
PORT='22'
USER='username'
PASS='password'
FILE='example.txt'

# connect to the SFTP server and download the file
/usr/bin/sftp -oPort=$PORT $USER@$HOST <<EOF
   cd /remote/directory/
   get $FILE
   bye
EOF

#Replace HOST, PORT, USER, PASS and FILE above with the details of the SFTP server you connect to and the name of the file to download or upload. Also make sure the correct permissions are set before running the script, to avoid any security issues.
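
#Note that the sftp client above never uses the PASS variable; it still prompts for the password interactively unless key-based login is configured. A minimal non-interactive sketch, assuming the sshpass utility is installed (not part of the original script):

#!/bin/bash
# hypothetical non-interactive variant: sshpass feeds the password to sftp
HOST='sftp.example.com'
PORT='22'
USER='username'
PASS='password'
FILE='example.txt'

sshpass -p "$PASS" /usr/bin/sftp -oPort=$PORT $USER@$HOST <<EOF
   cd /remote/directory/
   get $FILE
   bye
EOF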


ftp
=====
#!/bin/bash

# FTP server address
ftp_server="your_ftp_server"

# FTP username
ftp_user="your_ftp_username"

# FTP password
ftp_pass="your_ftp_password"

# local file path
local_path="/local/path/to/file"

# remote file path
remote_path="/remote/path/to/file"

# create a temporary file to hold the FTP commands
ftp_commands="/tmp/ftp_commands.txt"

# write the FTP commands to the temporary file
echo "user $ftp_user $ftp_pass" > $ftp_commands
echo "cd $remote_path" >> $ftp_commands
echo "put $local_path" >> $ftp_commands
echo "quit" >> $ftp_commands

# connect to the FTP server using the temporary command file
ftp -n $ftp_server < $ftp_commands

# remove the temporary file
rm $ftp_commands
The variables in the script need to be adjusted to your environment:

$ftp_server: FTP server address; replace with the actual FTP server address.
$ftp_user: FTP username; replace with the actual FTP username.
$ftp_pass: FTP password; replace with the actual FTP password.
$local_path: local file path; replace with the actual local file path.
$remote_path: remote file path; replace with the actual remote file path.
After running the script, it logs in to the FTP server without an interactive prompt and uploads the local file to the remote server.
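
The temporary command file can also be avoided entirely with a here-document; a minimal sketch using the same variables as above:

#!/bin/bash
# hypothetical here-document variant: same upload, no temporary command file
ftp_server="your_ftp_server"
ftp_user="your_ftp_username"
ftp_pass="your_ftp_password"
local_path="/local/path/to/file"
remote_path="/remote/path/to/file"

ftp -n $ftp_server <<EOF
user $ftp_user $ftp_pass
cd $remote_path
put $local_path
quit
EOF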

9. Batch monitoring of microservice status

#!/bin/bash
#call each service's health-check endpoint
#result = 1: service OK
#result = 0: service abnormal
#helper function: probes $path three times and echoes how many probes returned HTTP 200
#(note: the name "test" shadows the shell builtin, but the body uses "[", so it is harmless here)
test(){
a=0
b=3
c=0
while [ $a -lt $b ]
do
    curl -s -m 3 -I $path  | grep -o "200" &>/dev/null
     [ $? -eq 0 ] && let "c++"
         let "a++"
done
echo $c
}

case $1 in
        abs-auth-job)
path=http://<host>:<port>/actuator/health/
if [ `test` -eq 3 ];then
    echo "1"
else
    echo "0"
fi
        ;;
        abs-boss-detail)
path=http://<host>:<port>/abs/health/check
if [ `test` -eq 3 ];then
    echo "1"
else
    echo "0"
fi
        ;;
		bboss-bpm-send)
path=http://<host>:<port>/actuator/health
if [ `test` -eq 3 ];then
    echo "1"
else
    echo "0"
fi
        ;;
		new-abs-sync-cust)
path=http://<host>:<port>/abs/health/check
if [ `test` -eq 3 ];then
    echo "1"
else
    echo "0"
fi
        ;;
		omss-maintenance)
path=http://<host>:<port>/actuator/health
if [ `test` -eq 3 ];then
    echo "1"
else
    echo "0"
fi
        ;;
		     *)
echo "syntax error"
        ;;
esac
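
The 0/1 output of this script is convenient to hook into a monitoring agent. For example, a hypothetical zabbix_agentd UserParameter (script path and item key are assumed, not from the original):

# hypothetical zabbix agent item: poll one service by passing its name as the first parameter
UserParameter=micro.health[*],sh /home/app/script/check_service.sh $1
# the item key micro.health[abs-auth-job] then returns 1 (OK) or 0 (abnormal)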

10. Check whether each microservice has written logs today

Script path: /home/app/script/zxw/log_no_print.sh

#!/bin/bash
#check whether each microservice has written logs today

TODAY=`date +%F|tr -d '-'`
SERVLIST=`cat /tmp/service.txt|xargs -n 1`
for service in $SERVLIST
do
        count=`ls -l /abslogs/$service/*$TODAY*.log 2>/dev/null|wc -l`
        if [ $count -gt 0 ]
        then
            echo "$service 1" #1 = OK
        else
            echo "$service 0" #0 = abnormal
        fi
done

11. Check whether a service's logs have stopped updating in real time

Script path: /home/app/script/zxw/log_no_update.sh

#!/bin/bash
SERVLIST=`cat /tmp/service.txt|xargs -n 1`
#LOG_DIR="/abslogs/abs-svc-cust"
LAST_RUN_FILE="/tmp/last_run"
DATEONE=`date +%F|tr -d '-'`

for i in $SERVLIST
  do
    LOG_DIR="/abslogs/$i"


# make sure the last-run timestamp file exists
if [ ! -f "$LAST_RUN_FILE" ]; then
    touch "$LAST_RUN_FILE"
fi

# read the last run time (default to 0 on the first run, when the file is empty)
LAST_RUN=$(cat "$LAST_RUN_FILE")
LAST_RUN=${LAST_RUN:-0}

# newest log file for this service
LATEST_LOG=$(ls -t "$LOG_DIR"/*$DATEONE*.log | head -1)

# modification time of the newest log
LATEST_LOG_TIME=$(stat -c %Y "$LATEST_LOG")

# if the newest log is newer than the last run, logging is updating normally
if [ $LATEST_LOG_TIME -gt $LAST_RUN ]; then
    echo "$i 1" #1 = OK
else
    echo "$i 0" #0 = abnormal
fi

done

# record the current run time
date +%s > "$LAST_RUN_FILE"

12. Check whether a service's logs contain errors

Script path: /home/app/script/zxw/log_have_err.sh

#!/bin/bash
#check whether the service log contains errors (a given keyword)

TODAY=`date +%F|tr -d '-'`
SERVLIST=`cat /home/app/script/zxw/service.txt|xargs -n 1`

for service in $SERVLIST
do
	LOG_DIR="/abslogs/$service"
	LATEST_LOG=$(ls -t "$LOG_DIR"/*$TODAY*.log | head -1)
	if grep -q "error\|warn\|info" $LATEST_LOG;then
	echo $LATEST_LOG
      	fi
done
Error keywords still to be collected.

Version 2:
#!/bin/bash
#check whether the service log contains errors (a given keyword)

TODAY=`date +%F|tr -d '-'`
SERVLIST=`cat /home/app/zxw/service.txt|xargs -n 1`

if [ ! -f /tmp/money_error.log ];then
	touch /tmp/money_error.log
fi

for service in $SERVLIST
do

LOG_DIR="/abslogs/$service"

LATEST_LOG=$(ls -t "$LOG_DIR"/*$TODAY*.log | head -1)

	# "计费异常" (billing exception) is the error keyword being searched for
	if grep -q "计费异常" $LATEST_LOG
	then
		echo $TODAY $service 计费异常 >> /tmp/money_error.log
	else
		# if the log directory does not exist, stop here
		if [ ! -d $LOG_DIR ]
		then
			exit 0
		fi
	fi
done

13. Check whether any instance of a multi-instance service is hung

#!/bin/bash

SERVICELIST=`cat ~/moreservice.txt|xargs -n 1`  # list of multi-instance services
DATEONE=`date +%F|tr -d '-'` # today's date

for i in $SERVICELIST
  do
    LOG_DIR="/abslogs/$i/"

LATEST_LOG=$(ls -t $LOG_DIR*$DATEONE*| head -1)  # newest log
LATEST_LOG_TIME=$(stat -c %Y $LATEST_LOG)  # mtime of the newest log

NEVER_LOG=$(ls -t  $LOG_DIR*$DATEONE*|tail -1) # oldest log
NEVER_LOG_TIME=$(stat -c %Y $NEVER_LOG)  # mtime of the oldest log

#TIMEONE=`echo ${$LATEST_LOG_TIME - $NEVER_LOG_TIME}`

TIMEONE=$((LATEST_LOG_TIME - NEVER_LOG_TIME))
#newest log mtime minus oldest log mtime; alert when the gap exceeds 5 minutes

if [ $TIMEONE -gt 300 ]
  then  echo "$i is 1" #1 = NO (instance lagging)
    else
	echo "$i is 0" #0 = YES (normal)
fi

done

#0 = normal
#1 = abnormal

14. Batch-check whether the business-analysis feedback files returned normally (00 = normal)

#!/bin/bash

MONTH=`date +%Y%m`  # current month
MONTHDAY=`date +%Y%m%d` # today
DATEONE=`date -d "1 day ago" +"%Y%m%d"`  # yesterday

cd /rdata1/absfile/hqbi/bi/check/bak/$MONTH

for i in `ls -l *r_*$DATEONE*|awk -F ' ' '{print $NF}'`
do
	ONE=`ls -l *r_*$DATEONE*|awk -F '_' '{print $5}'`
	TWO=`cat $i|awk '{print $2}'|cut -c 7-8|uniq -c|awk '{print $2}'|xargs`
        echo $DATEONE $i $TWO
done

$ sh hqbi_chack.sh
20230813 r_a_20045_20230813_45014_00.verf 00
20230813 r_a_20045_20230813_45016_00.verf 00
20230813 r_a_20045_20230813_45019_00.verf 00
20230813 r_a_20045_20230813_45034_00.verf 00
20230813 r_a_20045_20230813_45035_00.verf 00
20230813 r_a_20045_20230813_45036_00.verf 00
20230813 r_a_20045_20230813_45037_00.verf 00
20230813 r_a_20045_20230813_45038_00.verf 00
20230813 r_a_20045_20230813_45039_00.verf 00
20230813 r_a_20045_20230813_45040_00.verf 00
20230813 r_a_20045_20230813_45042_00.verf 00
20230813 r_i_20045_20230813_45043_00.verf 00
20230813 r_i_20045_20230813_45049_00.verf 00
20230813 r_i_20045_20230813_45054_00.verf 00
20230813 r_i_20045_20230813_45057_00.verf 00
20230813 r_s_20045_20230813_45033_00.verf 00
20230813 r_s_20045_20230813_45051_00.verf 00
20230813 r_s_20045_20230813_45052_00.verf 00
20230813 r_s_20045_20230813_45055_00.verf 00
20230813 r_s_20045_20230813_45056_00.verf 00

15. Employee-ID login log script

Create zxw.sh to inspect the EAI logs for a given login employee ID. Copy the script below to /home/app/script/ under the host user and run it:
sh /home/app/script/zxw.sh
#read -p "please enter:"  prompts for the login employee ID used to query the logs
Script:
#!/bin/bash
#name     :Extract employee-ID login log entries
ROAD=/logs/bboss-new-eai/
TIME=`date +%F`
LOG=control.$TIME.log
#change into the log directory if it exists
if [ -e $ROAD ]
  then cd $ROAD
fi
#enter employee number
read -p "please enter:" commond
grep "$commond" $ROAD$LOG