版权说明:本文档由用户提供并上传,收益归属内容提供方,若内容存在侵权,请进行举报或认领
文档简介
【移动应用开发技术】快网CDN怎么样统计脚本
公司使用的快网的CDN,但是快网本身并不提供日志的分析统计,我这里还没有用elk,无奈只能先使用shell对日志进行分析统计,作为运维组对业务情况的一个大致的了解。脚本是对前一天的CDNLOG进行解压缩分析,最后将结果按类输出到指定目录,再将这些结果打包扔回CDNLOG目录,因为CDNLOG目录是要上传云存储的,一并保存。其实还有一个动作,是将这些内容作为参数传给python脚本给一个监控系统,出图便于查看,这里没有贴上。同样作为留底,不喜勿喷。

#!/bin/bash
# Print a terminal-width row of '=' characters (no trailing newline).
# Falls back to 80 columns when there is no usable terminal (e.g. cron),
# where the original bare `tput cols` would fail.
line()
{
    local cols
    cols=$(tput cols 2>/dev/null) || cols=80
    # %.0s consumes one argument per repetition and prints only the '='.
    printf '=%.0s' $(seq 1 "$cols")
}
# Initialise the global paths and the list of yesterday's CDN log archives.
# Sets globals: yesterday, comp_path, cdnlog_list, uncomp_path, user, backup_path.
global_define()
{
    yesterday=$(date -d "-1day" +%F)
    comp_path="/srv/cdnlog/downloads/${yesterday}/"
    # Archive basenames with the .gz extension stripped. Anchored pattern:
    # the original 's#.gz##g' (unescaped dot, global) could also mangle
    # names containing e.g. 'Xgz' in the middle.
    cdnlog_list=$(ls "${comp_path}" | sed 's#\.gz$##')
    uncomp_path="/srv/cdnlog/uncomp/${yesterday}/"
    user=$(whoami)
    backup_path="/srv/cdnlog/downloads/${yesterday}/"
}
# Abort unless running as root ($user is set by global_define); the
# /srv/cdnlog tree is presumably root-owned -- confirm against deployment.
global_check()
{
    # Quoted comparison: the original unquoted [ $user != root ] breaks
    # when $user is empty.
    if [ "$user" != "root" ]; then
        echo "Please use root ." >&2
        exit 1
    fi
}
# Per-archive driver loop.
# Iterates over every archive basename in $cdnlog_list (set by
# global_define) and runs the full analysis pipeline for it.
# NOTE(review): the loop variable MUST stay named 'n' -- check/define
# (and, through $log_path, every report function) read the global $n.
function cycle()
{
for n in $cdnlog_list
do
check           # remove any stale unpacked dir for this archive
define          # derive gz_file / log_file / log_path from $n
uncomp          # unpack the .gz into the working dir
segmentation    # split the log into per-metric .tmp files
source_address  # top client IPs report
method          # method + URL report
http_code       # status-code counts and rates
http_size       # response-size statistics
referer         # referer report
retime          # per-stage response-time statistics
platform        # client platform breakdown
tag             # download success/fail statistics
back            # back-to-origin statistics
cdn_node        # presumably defined later in the file -- not in this chunk
response_size   # presumably defined later in the file -- not in this chunk
delete          # presumably cleanup -- defined later, not in this chunk
done
}
# Remove any stale unpacked directory for the current archive ($n) so a
# re-run starts from a clean extraction.
check()
{
    if [ -d "${uncomp_path}${n}/" ]; then
        # ':?' aborts instead of expanding to 'rm -rf /' if either
        # global is ever empty; '--' stops option parsing.
        rm -rf -- "${uncomp_path:?}${n:?}/"
    fi
}
# Derive the per-archive file names and working directory from $n.
# Sets globals: gz_file, log_file, log_path.
define()
{
    gz_file="${n}.gz"
    log_file="${n}.log"
    # Reuse $uncomp_path (same value global_define assigns) instead of
    # hard-coding "/srv/cdnlog/uncomp/${yesterday}/" a second time, so the
    # location is defined in exactly one place.
    log_path="${uncomp_path}${n}/"
}
# Unpack ${comp_path}${gz_file} into ${log_path}${log_file}.
uncomp()
{
    mkdir -p "${log_path}"
    # -c keeps the original .gz intact (it is still uploaded to cloud storage).
    gunzip -c "${comp_path}${gz_file}" > "${log_path}${log_file}"
}
# Split one uncompressed log into per-metric .tmp files under $log_path
# and record the total request count in the global $log_file_count.
# Field positions below are taken from the awk programs in the original
# script; the exact log format is not visible here -- confirm against a
# real CDN log line.
segmentation()
{
    log_file_count=$(wc -l < "${log_path}${log_file}")

    # One awk pass fans the interesting columns out to per-metric files
    # (file names are passed via -v and used as print redirection targets).
    awk -v sour_addr="${log_path}sour_addr.tmp" \
        -v method="${log_path}method.tmp" \
        -v http_code="${log_path}http_code.tmp" \
        -v http_size="${log_path}http_size.tmp" \
        -v referer="${log_path}referer.tmp" \
        -v cdn_node="${log_path}cdn_node.tmp" \
        -v response_size="${log_path}response_size.tmp" \
        '{
            print $1      > sour_addr
            print $6, $7  > method
            print $9      > http_code
            print $10     > http_size
            print $11     > referer
            print $(NF)   > cdn_node
            print $(NF-1) > response_size
        }' "${log_path}${log_file}"

    # Everything after the cache-status token: nine retime columns,
    # the download tag and the back-to-origin field.
    grep -E "FCACHE_HIT_DISK|FCACHE_HIT|FCACHE_MISS" "${log_path}${log_file}" \
        | awk -F 'FCACHE_HIT_DISK|FCACHE_HIT|FCACHE_MISS' '{print $2}' \
        > "${log_path}more.tmp"

    awk -v retime1="${log_path}retime1.tmp" \
        -v retime2="${log_path}retime2.tmp" \
        -v retime3="${log_path}retime3.tmp" \
        -v retime4="${log_path}retime4.tmp" \
        -v retime5="${log_path}retime5.tmp" \
        -v retime6="${log_path}retime6.tmp" \
        -v retime7="${log_path}retime7.tmp" \
        -v retime8="${log_path}retime8.tmp" \
        -v retime9="${log_path}retime9.tmp" \
        -v tag="${log_path}tag.tmp" \
        -v back="${log_path}back.tmp" \
        '{
            print $1  > retime1
            print $2  > retime2
            print $3  > retime3
            print $4  > retime4
            print $5  > retime5
            print $6  > retime6
            print $7  > retime7
            print $8  > retime8
            print $9  > retime9
            print $10 > tag
            print $12 > back
        }' "${log_path}more.tmp"

    # The client platform sits inside the first parenthesised group of the
    # user-agent; split on '(', ')' or '|' and take the second piece.
    awk -F '[(|)]' '{print $2}' "${log_path}${log_file}" > "${log_path}platform.tmp"
}
# Report: top-30 client source addresses plus the distinct-address total,
# appended to ${log_path}sour_addr.log.
# Sets global sour_addr_total (kept global for downstream consumers).
source_address()
{
    local out="${log_path}sour_addr.log"
    {
        line
        echo "Source Address Top 30 :"
        echo ""
        sort -r "${log_path}sour_addr.tmp" | uniq -c | sort -nr | head -30
    } >> "$out"
    # Number of distinct source addresses.
    sour_addr_total=$(sort -r "${log_path}sour_addr.tmp" | uniq -c | wc -l)
    {
        echo ""
        echo "Source Address Total : $sour_addr_total"
        line
    } >> "$out"
}
# Report: top-30 request method + URL pairs (surrounding quotes stripped),
# appended to ${log_path}method.log.
method()
{
    local out="${log_path}method.log"
    {
        line
        echo "Method & URL Top 30 :"
        echo ""
        sed 's#"##g' "${log_path}method.tmp" | sort -r | uniq -c | sort -nr | head -30
        line
    } >> "$out"
}
# Report: per-status-code counts and percentage rates, appended to
# ${log_path}http_code.log. Denominator is $log_file_count (set by
# segmentation). Sets global http_code_list.
http_code()
{
    local out="${log_path}http_code.log"
    {
        line
        echo "Http Code Top :"
        echo ""
    } >> "$out"
    # Distinct codes, ordered by frequency.
    http_code_list=$(sort -r "${log_path}http_code.tmp" | uniq -c | sort -nr | awk '{print $2}')
    sort -r "${log_path}http_code.tmp" | uniq -c | sort -nr >> "$out"
    echo "" >> "$out"
    local y count rate
    for y in $http_code_list; do
        # Count straight from the raw data. The original re-parsed the
        # .log file it was appending to, which becomes fragile as extra
        # lines accumulate during the loop.
        count=$(grep -cx -- "$y" "${log_path}http_code.tmp")
        rate=$(echo "$count $log_file_count" | awk '{printf ("%.2f\n",$1*100/$2)}')
        {
            echo -e "HTTP $y Count: \t$count"
            echo -e "HTTP $y Rate: \t\t${rate}%"
            echo ""
        } >> "$out"
    done
    line >> "$out"
}
# Report: response-size statistics (max / min / total / avg), appended to
# ${log_path}http_size.log. Denominator for the average is $log_file_count.
# Sets globals size_max, size_min, size_total, size_avg.
http_size()
{
    size_max=$(sort -nr "${log_path}http_size.tmp" | head -1)
    size_min=$(sort -nr "${log_path}http_size.tmp" | tail -1)
    # awk may print a large sum in e-notation; normalise to a plain integer.
    size_total_tmp=$(awk '{sum+=$1}END{print sum}' "${log_path}http_size.tmp")
    size_total=$(echo "$size_total_tmp" | awk '{printf ("%.0f\n",$1)}')
    size_avg=$(echo "$size_total $log_file_count" | awk '{printf ("%.2f\n",$1/$2)}')
    local out="${log_path}http_size.log"
    {
        line
        echo ""
        echo -e "Http Size Max : \t$size_max"
        echo -e "Http Size Min : \t$size_min"
        echo -e "Http Size Total : \t$size_total"
        echo -e "Http Size Avg : \t$size_avg"
        line
    } >> "$out"
}
# Report: top-30 referers (surrounding quotes stripped), appended to
# ${log_path}referer.log.
referer()
{
    local out="${log_path}referer.log"
    {
        line
        echo "Referer Top 30 :"
        echo ""
        sed 's#"##g' "${log_path}referer.tmp" | sort -r | uniq -c | sort -nr | head -30
        line
    } >> "$out"
}
# Per-stage response-time statistics: for each of the nine retime columns,
# drop '-' placeholders, compute count/sum/avg/max/min, and write a small
# report to ${log_path}retimeN.log.
# Sets globals retimeN, sumN, avgN, maxN, minN (N = 1..9).
retime()
{
    # Drop placeholder records ('-' = no timing recorded).
    # NOTE: the original used 'sed -ie', which GNU sed parses as -i with
    # backup suffix 'e' and therefore littered retimeN.tmpe files around.
    sed -i -e '/-/d' "${log_path}"retime{1..9}.tmp

    local i tmpf cnt total
    for i in {1..9}; do
        tmpf="${log_path}retime${i}.tmp"
        cnt=$(wc -l < "$tmpf")
        total=$(awk '{sum+=$1}END{print sum}' "$tmpf" | awk '{printf ("%.2f\n",$1)}')
        # printf -v replaces the original eval-based dynamic assignment;
        # the variables stay global, as before.
        printf -v "retime${i}" '%s' "$cnt"
        printf -v "sum${i}" '%s' "$total"
        printf -v "avg${i}" '%s' "$(echo "$cnt $total" | awk '{printf ("%.2f\n",$2/$1)}')"
        printf -v "max${i}" '%s' "$(sort -nr "$tmpf" | head -1)"
        printf -v "min${i}" '%s' "$(sort -nr "$tmpf" | tail -1)"
    done

    local k out maxv minv avgv sumv
    for k in {1..9}; do
        out="${log_path}retime${k}.log"
        maxv="max${k}"; minv="min${k}"; avgv="avg${k}"; sumv="sum${k}"
        {
            line
            echo "Retime${k} :"
            echo ""
            echo -en "Max \t: \t";   echo "${!maxv}"
            echo -en "Min \t: \t";   echo "${!minv}"
            echo -en "Avg \t: \t";   echo "${!avgv}"
            echo -en "Total : \t";   echo "${!sumv}"
            line
        } >> "$out"
    done

    # Some awks print '-nan' when a column had zero samples; show 0.00.
    sed -i 's#-nan#0.00#g' "${log_path}"retime{1..9}.log
}
# Report: client-platform breakdown (count + percentage of all requests),
# appended to ${log_path}platform.log. Denominator is $log_file_count.
# Sets globals <platform>_count and <platform>_rate for each platform.
platform()
{
    local src="${log_path}platform.tmp"
    # grep -cE replaces the original 'cat | egrep | wc -l' pipelines.
    iphone_count=$(grep -cE "iPhone" "$src")
    ipad_count=$(grep -cE "iPad" "$src")
    mac_count=$(grep -cE "Macintosh" "$src")
    android_count=$(grep -cE "Android|Adr" "$src")
    windows_count=$(grep -cE "Windows NT|Windows 98|Windows XP" "$src")
    windows_mobile_count=$(grep -cE "Windows Phone" "$src")
    java_count=$(grep -cE "java" "$src")
    linux_count=$(grep -cE "X11; Linux|linux-gnu" "$src")

    # Helper: percentage of total requests, two decimals.
    _pct() { echo "$1 $log_file_count" | awk '{printf ("%.2f\n",$1/$2*100)}'; }
    iphone_rate=$(_pct "$iphone_count")
    ipad_rate=$(_pct "$ipad_count")
    mac_rate=$(_pct "$mac_count")
    android_rate=$(_pct "$android_count")
    windows_rate=$(_pct "$windows_count")
    windows_mobile_rate=$(_pct "$windows_mobile_count")
    java_rate=$(_pct "$java_count")
    linux_rate=$(_pct "$linux_count")

    local out="${log_path}platform.log"
    {
        line
        echo "Platform Info :"
        echo ""
        echo -e "iPhone count \t\t: $iphone_count"
        echo -e "iPhone rate \t\t: ${iphone_rate}%"
        echo ""
        echo -e "iPad count \t\t: $ipad_count"
        echo -e "iPad rate \t\t: ${ipad_rate}%"
        echo ""
        echo -e "Mac count \t\t: $mac_count"
        echo -e "Mac rate \t\t: ${mac_rate}%"
        echo ""
        echo -e "Android count \t\t: $android_count"
        echo -e "Android rate \t\t: ${android_rate}%"
        echo ""
        echo -e "Windows count \t\t: $windows_count"
        echo -e "Windows rate \t\t: ${windows_rate}%"
        echo ""
        echo -e "Windows mobile count \t: $windows_mobile_count"
        echo -e "Windows mobile rate \t: ${windows_mobile_rate}%"
        echo ""
        echo -e "Java count \t\t: $java_count"
        echo -e "Java rate \t\t: ${java_rate}%"
        echo ""
        echo -e "Linux count \t\t: $linux_count"
        echo -e "Linux rate \t\t: ${linux_rate}%"
        line
    } >> "$out"
}
# Report: download success/fail statistics from the tag column (field 10
# of more.tmp), appended to ${log_path}tag.log.
# Sets globals tag_total, tag_suc, tag_fail, tag_suc_rate, tag_fal_rate.
tag()
{
    tag_total=$(wc -l < "${log_path}more.tmp")
    # Exact-match on the field value. The original 'grep "1"' was a
    # substring match and would also count values like "10" or "21".
    tag_suc=$(awk '$10=="1"' "${log_path}more.tmp" | wc -l)
    tag_fail=$(awk '$10=="0"' "${log_path}more.tmp" | wc -l)
    tag_suc_rate=$(echo "$tag_suc $tag_total" | awk '{printf ("%.2f\n",$1/$2*100)}')
    tag_fal_rate=$(echo "$tag_fail $tag_total" | awk '{printf ("%.2f\n",$1/$2*100)}')
    local out="${log_path}tag.log"
    {
        line
        echo "Tag of Download Info :"
        echo ""
        echo -e "Download Success Count \t\t: $tag_suc"
        echo -e "Download Success Rate \t\t: ${tag_suc_rate}%"
        echo -e "Download Fail Count \t\t: $tag_fail"
        echo -e "Download Fail Rate \t\t: ${tag_fal_rate}%"
        line
    } >> "$out"
}
function
back()
{
back_total=`cat
${log_path}more.tmp
|
awk
$12}'
|
egrep
-v
"-"|
wc
-l`
back_rate=`echo
"$back_total
$log_file_count"
|
awk
'{printf
("%.2f\n",$1/$2*100)}'`
line
>>
${log_path}back.log
echo
"Back
Source
Info
:"
>>
${log_path}back.log
echo
""
>>
${log_path}back.log
echo
-e
"Back
Source
Count
\t:
$back_total"
>>
${log_path}back.log
echo
-e
"Back
Source
Rate
\t:
${back_rate}%"
>>
${
温馨提示
- 1. 本站所有资源如无特殊说明,都需要本地电脑安装OFFICE2007和PDF阅读器。图纸软件为CAD,CAXA,PROE,UG,SolidWorks等.压缩文件请下载最新的WinRAR软件解压。
- 2. 本站的文档不包含任何第三方提供的附件图纸等,如果需要附件,请联系上传者。文件的所有权益归上传用户所有。
- 3. 本站RAR压缩包中若带图纸,网页内容里面会有图纸预览,若没有图纸预览就没有图纸。
- 4. 未经权益所有人同意不得将文件中的内容挪作商业或盈利用途。
- 5. 人人文库网仅提供信息存储空间,仅对用户上传内容的表现方式做保护处理,对用户上传分享的文档内容本身不做任何修改或编辑,并不能对任何下载内容负责。
- 6. 下载文件中如有侵权或不适当内容,请与我们联系,我们立即纠正。
- 7. 本站不保证下载资源的准确性、安全性和完整性, 同时也不承担用户因使用这些下载资源对自己和他人造成任何形式的伤害或损失。
最新文档
- 市政城市改建工程
- 成都混声合唱谱
- 学校燃气安全隐患排查整治专项行动方案
- 小学培养良好学习习惯教学设计
- 课时规范练测试题 铁及其重要化合物
- 老年心理护理-心理图解头部
- 薪酬管理知识点
- DB 1504T 1043-2024 仁用杏绿色高效施肥及防控栽培技术规范
- 2024文旅行业社交媒体AI营销解决方案 -AI时代文旅营销如何破局增长
- 摩托车类考试题库的驾照考试题库420题
- 储罐废气收集技术方案
- 中医诊断技术在风湿病中的应用
- 永州网约车题库及答案
- 地方政府专项债券政策介绍教学课件
- 金秋乡村丰收节活动策划方案
- 采购战略管理及采购谈判技巧
- 大学物理课件57波尔共振实验
- 预防七情致病的方法
- 2023致命性肺血栓栓塞症急救护理专家共识PPT
- 专家坐诊协议书
- 班会我劳动我快乐
评论
0/150
提交评论