
#!/bin/bash
export LANG=en_US.UTF-8
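# Discover running Hadoop namenode/jobtracker processes and emit a JSON array
# describing each one (component name, listening port, install path, PID).
# Illustrative output (values are assumed examples; actual values depend on the host):
#   [{"cpname":"hadoop","port":"50070","path":"/usr/local/hadoop","name":"Hadoop Component Template","flag":"12345"}]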
# Collect PIDs of Hadoop namenode/jobtracker processes, excluding grep and this script (ps.sh).
pids=$(ps aux | grep -v grep | grep -v ps.sh | grep hadoop | grep -E "namenode|jobtracker" | awk '{print $2}')
discover_hadoop_json=""
for pid in $pids
do
    # Non-loopback TCP port this PID listens on; the ":50070" filter limits it to the namenode web UI.
    port=$(netstat -lnp 2>/dev/null | grep "$pid/java" | grep tcp | grep -v "127.0.0.1" | grep ":50070" | awk '{print $4}' | awk -F ':' '{print $2}')
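    # Illustrative netstat line (assumed example) showing what the pipeline above extracts:
    #   tcp  0  0  0.0.0.0:50070  0.0.0.0:*  LISTEN  12345/java
    # awk '{print $4}' picks "0.0.0.0:50070"; awk -F ':' '{print $2}' leaves "50070".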
    # Pull the -Dhadoop.home.dir=... JVM argument from the process command line.
    path=$(ps aux | grep "$pid" | grep -v grep | grep -v ps.sh | grep hadoop | grep -E "namenode|jobtracker" | awk '{for (i=1;i<=NF;i++) if($i~/-Dhadoop.home.dir/) print $i}')
    # Strip everything up to the first "=": "-Dhadoop.home.dir=/path" -> "/path".
    logpath="${path#*=}"
    # Append one component record; the single quotes are swapped for double quotes below.
    discover_hadoop_json="$discover_hadoop_json{'cpname':'hadoop','port':'$port','path':'$logpath','name':'Hadoop Component Template','flag':'$pid'},"
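    # Illustrative fragment appended for one PID (assumed values):
    #   {'cpname':'hadoop','port':'50070','path':'/usr/local/hadoop','name':'Hadoop Component Template','flag':'12345'},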
done
# Drop the trailing comma and wrap the records in a JSON array.
result="[${discover_hadoop_json%,}]"
# Convert the single-quoted placeholders to real JSON double quotes.
result=${result//\'/\"}
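# Illustrative before/after for the two steps above (one assumed record, abbreviated):
#   [{'cpname':'hadoop',...},]  ->  [{"cpname":"hadoop",...}]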
echo "$result"
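# Usage sketch (the "grep -v ps.sh" filter above suggests this file is saved as ps.sh; assumed):
#   bash ps.sh
#   [{"cpname":"hadoop","port":"50070","path":"/usr/local/hadoop","name":"Hadoop Component Template","flag":"12345"}]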