📄 djc_poss
字号:
#!/bin/bash
#-----------------------------------------------------------------------
# File    : djc_poss
# Contents: possibilistic network induction on Danish Jersey Cattle data
#           (real world data)
# Author  : Christian Borgelt
# History : 17.12.1999 file created
#           16.01.2000 simulated annealing added
#           09.03.2002 shell changed from csh to sh
#           10.04.2002 all induction loops moved into functions
# Note    : shebang fixed from /bin/sh to /bin/bash: the script relies
#           on bash-only features ('function' keyword, [[ ]], (( )),
#           C-style for loops) that POSIX sh (dash/ash) rejects.
#-----------------------------------------------------------------------

#-----------------------------------------------------------------------
# collect: filter an 'neval' report read from stdin and print one
# partial result line: network name, condition/parameter counts, and
# the average/minimum/maximum evaluation. Deliberately prints NO
# trailing newline so evaluate() can append the relative evaluation.
#-----------------------------------------------------------------------
collect () {
  gawk '
  /evaluation of/         { network = $3  }
  /number of attributes/  { attcnt  = $NF }
  /number of conditions/  { concnt  = $NF }
  /number of parameters/  { parcnt  = $NF }
  /number of tuples/      { tplcnt  = $NF }
  /impossible tuples/     { imptpl  = $4  }
  /minimum/               { minimum = $NF }
  /average/               { average = $NF }
  /maximum/               { maximum = $NF }
  /additional conditions/ { addcnt  = $NF }
  /missing conditions/    { miscnt  = $NF }
  END {
    printf("%-12s", network);
    printf(" %3d %5d", concnt, parcnt);
    printf(" %10g %10g %10g", average, minimum, maximum);
  }'
} # collect()

#-----------------------------------------------------------------------
# average: read the collected result lines from poss.tmp, group
# consecutive lines by network name (field 1), and print per group the
# averaged condition/parameter counts and the averaged absolute and
# relative avg/min/max evaluations.
#-----------------------------------------------------------------------
average () {
  gawk '
  function output() {            # print averages of the current group
    if (NR > 0) {
      printf("%-10s", network);
      printf(" %6.1f %7.1f",         concnt/n, parcnt/n);
      printf(" %7.3f/%6.3f/%6.3f",   avg1/n, min1/n, max1/n);
      printf(" %7.3f/%6.3f/%6.3f\n", avg2/n, min2/n, max2/n);
    }
  }
  BEGIN { network = ""; n = 0; }
  ($1 == network) {              # same network: accumulate sums
    concnt += $2; parcnt += $3;
    avg1 += $4; min1 += $5; max1 += $6;
    avg2 += $7; min2 += $8; max2 += $9; n++;
  }
  ($1 != network) {              # new network: flush and restart sums
    if (n > 0) output();
    network = $1; n = 1;
    concnt = $2; parcnt = $3;
    avg1 = $4; min1 = $5; max1 = $6;
    avg2 = $7; min2 = $8; max2 = $9;
  }
  END { if (n > 0) output(); }' poss.tmp
} # average()

#-----------------------------------------------------------------------
# evaluate: evaluate the network file $1 against the original network
# (djc.pnt, absolute evaluation) and against the data only (-s,
# relative evaluation); append one combined line to poss.tmp and
# delete the network file afterwards.
#-----------------------------------------------------------------------
evaluate () {                    # --- evaluate a given network
  neval -u"*" -c djc.pnt "$1" djc.tab 2> /dev/null \
  | collect >> poss.tmp
  neval -u"*" -s "$1" djc.tab 2> /dev/null \
  | gawk '/average/ { printf(" %10g",   $NF); }
          /minimum/ { printf(" %10g",   $NF); }
          /maximum/ { printf(" %10g\n", $NF); }' >> poss.tmp
  rm -f "$1"
} # evaluate()

#-----------------------------------------------------------------------
# induce: induce one network with selection method $1 and evaluation
# measure $2 (optional extra ines flag in $3), evaluate it, and append
# the averaged result line to poss.res.
#-----------------------------------------------------------------------
induce () {                      # --- induce and evaluate networks
  rm -f poss.tmp
  ines -u'*' -mx -s"$1" -e"$2" ${3:+"$3"} djc.dom djc.tab "$2" 2> /dev/null
  evaluate "$2"
  average | tee -a poss.res
} # induce()

#-----------------------------------------------------------------------
# indlocal: like induce(), but run a second ines pass (-c0 -g-1e-12)
# on the induced network to learn local structure while preserving the
# global structure of the first pass.
#-----------------------------------------------------------------------
indlocal () {                    # --- induce and evaluate networks
  rm -f poss.tmp
  ines -u'*' -mx -s"$1" -e"$2" djc.dom djc.tab "$2" 2> /dev/null
  ines -c0 -g-1e-12 -u'*' -mx -s"$1" -e"$2" "$2" djc.tab "$2" 2> /dev/null
  evaluate "$2"
  average | tee -a poss.res
} # indlocal()

#-----------------------------------------------------------------------
# fixed: evaluate a fixed reference network; $1 == indep evaluates the
# empty (independence) network built from djc.dom, anything else the
# original network djc.pnt.
#-----------------------------------------------------------------------
fixed () {                       # --- evaluate empty/original network
  rm -f poss.tmp
  if [[ "$1" == indep ]]; then in="djc.dom"; else in="djc.pnt"; fi
  ines -u'*' -mx "$in" djc.tab "$1" 2> /dev/null
  evaluate "$1"
  average | tee -a poss.res
} # fixed()

#-----------------------------------------------------------------------
# owst: optimum weight spanning tree construction with four measures.
#-----------------------------------------------------------------------
owst () {                        # --- optimum weight spanning tree cons.
  echo "---owst--------------------------------------------------------------" \
  | tee -a poss.res
  for m in spcgain spcsgr1 chi2 mutspc; do
    induce owst "$m"
  done
} # owst()

#-----------------------------------------------------------------------
# extst: optimum weight spanning tree extension with four measures.
#-----------------------------------------------------------------------
extst () {                       # --- optimum weight spanning tree ext.
  echo "---extst-------------------------------------------------------------" \
  | tee -a poss.res
  for m in spcgain spcsgr1 chi2 mutspc; do
    induce extst "$m"
  done
} # extst()

#-----------------------------------------------------------------------
# topord: condition selection on a topological order, five measures.
#-----------------------------------------------------------------------
topord () {                      # --- selection on topological order
  echo "---topord------------------------------------------------------------" \
  | tee -a poss.res
  for m in spcgain spcgr spcsgr1 chi2 mutspc; do
    induce topord "$m"
  done
} # topord()

#-----------------------------------------------------------------------
# noloop: condition selection avoiding directed loops. Evaluates with a
# plain neval run (absolute part only) instead of evaluate(), averaging
# each network individually.
#-----------------------------------------------------------------------
noloop () {                      # --- selection avoiding directed loops
  echo "---noloop------------------------------------------------------------" \
  | tee -a poss.res
  for m in spcgain spcsgr1 chi2 mutspc; do
    ines -u'*' -mx -snoloop -e"$m" djc.dom djc.tab "$m" 2> /dev/null
    neval -u'*' "$m" djc.tab 2> /dev/null | collect > poss.tmp
    rm -f "$m"
    average | tee -a poss.res
  done
} # noloop()

#-----------------------------------------------------------------------
# localsel: unrestricted local structure learning (topological order
# with -g-1e-12). Renamed from 'local' to avoid shadowing the bash
# builtin of the same name.
#-----------------------------------------------------------------------
localsel () {                    # --- local structure learning
  echo "---local-------------------------------------------------------------" \
  | tee -a poss.res
  for m in spcgain spcgr spcsgr1 chi2 mutspc; do
    induce topord "$m" -g-1e-12
  done
} # localsel()

#-----------------------------------------------------------------------
# lwise: levelwise local structure learning (-G instead of -g).
#-----------------------------------------------------------------------
lwise () {                       # --- local structure learning
  echo "---lwise-------------------------------------------------------------" \
  | tee -a poss.res
  for m in spcgain spcgr spcsgr1 chi2 mutspc; do
    induce topord "$m" -G-1e-12
  done
} # lwise()

#-----------------------------------------------------------------------
# post: local structure learning as a post-processing step that
# preserves the global structure (via indlocal()).
#-----------------------------------------------------------------------
post () {                        # --- local structure learning
  echo "---post--------------------------------------------------------------" \
  | tee -a poss.res
  for m in spcgain spcgr spcsgr1 chi2 mutspc; do
    indlocal topord "$m"
  done
} # post()

#-----------------------------------------------------------------------
# sian: hypertree simulated annealing; ten runs (seeds 10..19) each
# without (-w0.0) and with (-w0.001) a weight penalty, averaged.
#-----------------------------------------------------------------------
sian () {                        # --- hypertree simulated annealing
  echo "---sian--------------------------------------------------------------" \
  | tee -a poss.res
  for p in 0 001; do             # 0005
    if (( p == 0 )); then out="sian_no"; else out="sian_yes"; fi
    rm -f poss.tmp
    for (( i = 0; i < 10; i++ )); do
      ines -u'*' -mx -ssian -w0.$p -S1$i djc.dom djc.tab "$out" \
           2> /dev/null
      evaluate "$out"
    done
    average | tee -a poss.res
  done
} # sian()

#-----------------------------------------------------------------------
# main program
#-----------------------------------------------------------------------
# header columns aligned to the printf widths used in average()
echo "                                absolute              relative" \
| tee poss.res
echo "network      cond  params     avg    min    max      avg    min    max" \
| tee -a poss.res
echo "---------------------------------------------------------------------" \
| tee -a poss.res
fixed indep    # evaluate empty network
fixed orig     # evaluate original network
owst           # optimum weight spanning tree construction
#extst         # optimum weight spanning tree extension
topord         # condition selection on topological order
#noloop        # condition selection avoiding directed loops
localsel       # local structure learning (unrestricted)
#lwise         # local structure learning (levelwise)
#post          # local structure learning (preserving global)
sian           # hypertree simulated annealing
rm -f poss.tmp # clean up temporary file
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -