
📄 np_rets.py

This software generates NetFlow records from network data. NetFlow can be used for network planning, load balancing, security monitoring, and more.
            print str(s)
            sys.exit(1)

    totconn = 0
    allconntms = Numeric.zeros(N_DUR_BINS+1,)
    for n, a in transperconn:
        totconn += n
        if n:
            for i in range(N_DUR_BINS+1):
                allconntms[i] += a[i]

    # distribution of transactions per TCP connection
    tpf = []
    for suff in ['.hist', '.pdf', '.cdf']:
        f = openf(tcdir + '/transperconns' + suff, 'w')
        tpf.append(f)
        s = '\n#\n# Distribution of # transactions per TCP connection (%s)\n#\n' % (suff.replace('.', ''))
        f.write(s)
    accum = 0.0
    for i in range(MAX_NTRANS+1):
        n = transperconn[i][0]
        if n:
            p = float(n)/totconn
            accum += p
            write_file(tpf[0], '%10d %10d' % (i, n))
            write_file(tpf[1], '%10d %7.6f' % (i, p))
            write_file(tpf[2], '%10d %7.6f' % (i, accum))

    # distribution of connection durations over all connections
    tpf = []
    for suff in ['.hist', '.pdf', '.cdf']:
        f = openf(tcdir + '/allconndurs' + suff, 'w')
        tpf.append(f)
        s = '\n#\n# Distribution of TCP connection durations ms (%s)\n#\n' % (suff.replace('.', ''))
        f.write(s)
    accum = 0.0
    for i in range(N_DUR_BINS):
        n = allconntms[i]
        if n:
            p = float(n)/totconn
            accum += p
            write_file(tpf[0], '%10d %10d' % (i, n))
            write_file(tpf[1], '%10d %7.6f' % (i, p))
            write_file(tpf[2], '%10d %7.6f' % (i, accum))
    # overflow bin: durations longer than MAX_DUR
    n = allconntms[N_DUR_BINS]
    if n:
        p = float(n)/totconn
        accum += p
        write_file(tpf[0], '# %10d+ %10d' % (MAX_DUR, n))
        write_file(tpf[1], '# %10d+ %7.6f' % (MAX_DUR, p))
        write_file(tpf[2], '# %10d+ %7.6f' % (MAX_DUR, accum))

    # average connection duration per transaction vs number of transactions carried
    ntdf = openf(tcdir + '/trans-v-avedur', 'w')
    ntdf.write('\n#\n# Average connection duration *per transaction* for connections carrying x transactions ms\n#\n')
    for i in range(MAX_NTRANS):
        t, a = transperconn[i]
        if t:
            totms = totc = 0
            tpf = []
            for suff in ['.hist', '.pdf', '.cdf']:
                f = openf(tcdir + '/conndurs-%d-trans%s' % (i, suff), 'w')
                tpf.append(f)
                s = '\n#\n# Distribution of TCP connection durations carrying %d transactions ms (%s)\n#\n' % (i, suff.replace('.', ''))
                f.write(s)
            accum = 0.0
            for j in range(N_DUR_BINS):
                n = a[j]
                if n:
                    p = float(n)/t
                    accum += p
                    write_file(tpf[0], '%10d %10d' % (j, n))
                    write_file(tpf[1], '%10d %7.6f' % (j, p))
                    write_file(tpf[2], '%10d %7.6f' % (j, accum))
                    totc += n
                    totms += j*n
            n = a[N_DUR_BINS]
            if n:
                p = float(n)/totconn
                accum += p
                write_file(tpf[0], '# %10d+ %10d' % (MAX_DUR, n))
                write_file(tpf[1], '# %10d+ %7.6f' % (MAX_DUR, p))
                write_file(tpf[2], '# %10d+ %7.6f' % (MAX_DUR, accum))
            if i and totc:
                write_file(ntdf, '%5d %5d' % (i, totms/(totc*i)))

    record(sumfile, 'TCP connection duration and transactions per connection data written to %s\n' % (tcdir+'/'))
    record(sumfile, '%d connections\n%d persistent connections\n%d persistent connections carrying single transaction' % (totconn, npers, npers1))
    record(sumfile, '\n================================================================\n\n')

#############################################################################

def do_output(filepath, sumfile, obs, methods, agents, filetypes,
              totrans, non_200, unfin, not_gets, save_agents, modeflag,
              showflag, transperconn, npers, npers1):

    # element-wise sum of two per-object-type stats entries
    def sum_bins(a, b):
        s = [0, Numeric.zeros(NBINS,), [], 0, Numeric.zeros(MAX_RETCODE,),
             BIGNUMBER, 0]
        s[0] = a[0] + b[0]
        sb = s[1]
        ab = a[1]
        bb = b[1]
        for i in range(NBINS):
            sb[i] = ab[i] + bb[i]
        s[2].extend(a[2])
        s[2].extend(b[2])
        s[3] = a[3] + b[3]
        sb = s[4]
        ab = a[4]
        bb = b[4]
        for i in range(MAX_RETCODE):
            sb[i] = ab[i] + bb[i]
        s[5] = min(a[5], b[5])
        s[6] = max(a[6], b[6])
        return s

    finobs = obs[0]
    unfinobs = obs[1]
    # coalesce all dict entries for total everything
    all_finished = reduce(sum_bins, finobs.values())
    all_unfinished = reduce(sum_bins, unfinobs.values())
    all = reduce(sum_bins, [all_finished, all_unfinished])
    # combine finished/unfinished to sort types by frequency over both
    sortdict = {}
    for (type, v) in finobs.iteritems():
        sortdict[type] = v[0]
    for (type, v) in unfinobs.iteritems():
        if sortdict.has_key(type):
            sortdict[type] += v[0]
        else:
            sortdict[type] = v[0]
    sortlist = [(tot, type) for (type, tot) in sortdict.iteritems()]
    sortlist.sort()
    sortlist.reverse()
    n = 0
    for tot, type in sortlist:
        n += tot
    #print 'totrans', totrans, 'accumtot', n, 'coalesce tot', all[0]

    # construct data list of ordered finished/unfinished pairs
    oblist = [(all_finished, all_unfinished, 'all-types')]
    for tot, key in sortlist:
        fin = finobs.get(key, [0, Numeric.zeros(NBINS,), [], 0,
                               Numeric.zeros(MAX_RETCODE,), BIGNUMBER, 0])
        unfin = unfinobs.get(key, [0, Numeric.zeros(NBINS,), [], 0,
                                   Numeric.zeros(MAX_RETCODE,), BIGNUMBER, 0])
        oblist.append((fin, unfin, object_type_string(key)))

    do_methods(filepath, methods, totrans)
    do_obtypes(filepath, sumfile, oblist, filetypes, totrans)
    do_retcodes(filepath, sumfile, [(all, 'all_types')], totrans)
    do_obsz(filepath, sumfile, oblist, totrans, non_200, modeflag, showflag)
    do_agents(filepath, sumfile, save_agents, agents)
    do_transconns(filepath, sumfile, transperconn, npers, npers1)

#############################################################################

# command-line handling
scriptname = os.path.basename(argv[0])
optstr = ''
fspec = 0
modeflag = 'r'
showflag = 0
save_agents = 0
check_obtypes = 0
try:
    optlist, args = getopt.getopt(sys.argv[1:], 'hF:s:at:')
except getopt.error, s:
    print scriptname + ": " + str(s)
    usage(scriptname)
    sys.exit(1)
for opt in optlist:
    if opt[0] == "-h":
        usage(scriptname)
    elif opt[0] == "-s":
        if not opt[1] in ['r', 'p', 'c']:
            print 'unrecognised mode arg \'%s\'' % (opt[1])
            usage(scriptname)
        modeflag = opt[1]
        showflag = 1
    elif opt[0] == "-F":
        if opt[1] == 'h':
            filter_help()
            sys.exit(0)
        fspec = int(opt[1])
        optstr = optstr + '-F' + opt[1]
    elif opt[0] == '-a':
        save_agents = 1
    elif opt[0] == '-t':
        check_obtypes = 1
        objdir = opt[1] + '/'

openfilelist, counters, basepath = get_files(args)
counters.printself("")

# create the results directory
if optstr:
    optstr += '.'
basepath += '%snp_rets' % (optstr)
try:
    os.mkdir(basepath)
    print 'Created results directory %s' % (basepath)
except OSError, s:
    if str(s).find('exists'):
        print 'Results directory %s already exists' % (basepath)
    else:
        print str(s)
        sys.exit(1)
filepath = basepath + '/'
counters.printself_tofile(filepath + 'counters', '')

# array for retvals etc
retvals = Numeric.zeros(MAX_RETCODE,)
methods = Numeric.zeros(MAX_METHOD,)
MAX_DUR = 10*60*1000 # 10 mins in ms
N_DUR_BINS = MAX_DUR+1 # 1ms bins
transperconn = []
for i in range(MAX_NTRANS+1):
    transperconn.append([0, Numeric.zeros(N_DUR_BINS+1,)])
totrans = 0
non_200 = 0
not_gets = 0
npers = npers1 = 0
obs = [{}, {}]
very_large_obs = []
errs = 0
notboth = 0
unfin = 0 # unfinished trans
nconns = 0
agents = WebAgents()
addagents = agents.add
if check_obtypes:
    filetypes = FileTypes(objdir=objdir, corr_by_magic=0, mlengths=[2,20,1], verbose=1, report_diff_files=0)
    checktype = filetypes.check_obtype
else:
    filetypes = checktype = None
reportpath = filepath + 'summary'
sumfile = open(reportpath, 'w')
sumfile.write('Summary of transaction analysis\n\n')
sumfile.write('===============================\n\n\n')

# re-usable
connrec, translist = allocate_http_reusable_objects()

for file in openfilelist:
    if len(openfilelist) > 1:
        print "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
        print "File %s - %d records" % (file.fnm, file.counters.nrecords)
        print
        sys.stdout.flush()
    while 1:
        rtype = file.next_type_rec(REC_TCP_HTTP)
        if rtype == -1: # EOF
            break
        elif rtype == REC_TCP_HTTP:
            translen = get_http_rec_and_trans(file, connrec, translist)
            #index, connrec, translist = get_http_rec(file)
        else:
            file.advance()
            continue
        if not accept_conn(connrec.flow_inner, fspec):
            continue
        nconns += 1
        if (not connrec.server_seen()) or (not connrec.client_seen()):
            notboth += 1
            continue
        server = client = None

        # bin the connection duration (ms)
        dur = (ull2l(connrec.close()) - ull2l(connrec.open()))/1000
        if dur > MAX_DUR:
            bin = MAX_DUR+1
        else:
            bin = int(dur)
        transperconn[translen][0] += 1
        transperconn[translen][1][bin] += 1
        if connrec.http_persistent():
            npers += 1
            if translen == 1:
                npers1 += 1

        for i in range(translen):
            t = translist[i]
            if t.http_serv_isdummytrans():
                break
            totrans += 1
            if (not t.http_serv_isvalid()) or t.http_serv_iserr() or (not t.http_cli_isvalid()) or t.http_cli_iserr():
                errs += 1
                continue
            if not server:
                shost = connrec.dhost()
                sa = t.get_server()
            if not client:
                chost = connrec.shost()
                ua = t.get_uagent()
            addagents(shost, chost, sa, ua)

            retcode = t.http_server_retcode()
            if retcode >= MAX_RETCODE:
                print 'Bad retcode', retcode
                errs += 1
                connrec.printself()
                for trans in translist:
                    trans.printself(connrec)
            if retcode == 200:
                ok = 1
            else:
                ok = 0
                non_200 += 1
            method = t.http_meth()
            methods[method] += 1
            if method != HTTP_METHOD_GET:
                not_gets += 1

            obsz = t.http_obj_bytes()
##             if obsz == 0:
##                 #connrec.printself(0)
##                 #t.printself(connrec)
##                 zerolens += 1
##                 continue
            obtype = t.http_rep_objtype()
            if check_obtypes and obsz:
                checktype(obtype, '%d.%d' % (connrec.get_conn_id(), i))

            sstatus = t.http_serv_status()
            if not (sstatus & TRANS_FINISHED) and (sstatus & TRANS_INCOMPLETE):
                unfin += 1
                osz = obs[1]
            else:
                osz = obs[0]
            #
            # for each object type dict entry (keyed by type):
            # [total trans, size array, list of sizes > array max,
            #    tot non 200 responses, server response array, minsz, maxsz]
            #
            bins = osz.setdefault(obtype, [0, Numeric.zeros(NBINS,), [],
                                           0, Numeric.zeros(MAX_RETCODE,),
                                           BIGNUMBER, 0])
            bins[0] += 1
            if ok:
                if obsz == 0:
                    bins[1][0] += 1
                elif obsz < OBSZ_MAX:
                    bins[1][(obsz/OBSZ_BIN) + 1] += 1
                else:
                    bins[2].append(obsz)
            else:
                bins[3] += 1
            bins[4][retcode] += 1
            bins[5] = min(bins[5], obsz)
            bins[6] = max(bins[6], obsz)
            if obsz > VERY_LARGE_OB_THRESH:
                very_large_obs.append((split(file.fnm)[1], connrec.get_conn_id(), i, center(http_server_objtype_string(obtype), 30), obsz))

#filetypes.report_diffs()

# write the overall summary and all per-category reports
print 'Summary of transaction analysis'
print '===============================\n'
record(sumfile, '%10d %-15s' % (nconns, 'Total TCP/HTTP connections'))
record(sumfile, '%10d %-15s (%2.3f%% of total)\n' % (notboth, 'Unidirectional (rejected)', (notboth*100.0)/nconns))
record(sumfile, '%10d %-15s' % (totrans, 'Total transactions'))
record(sumfile, '%10d %-15s (%2.3f%%)' % (errs, 'Errors', (errs*100.0)/totrans))
nvalid = totrans - errs
record(sumfile, '%10d %-15s (%2.3f%%)\n' % (nvalid, 'Valid', (nvalid*100.0)/totrans))
record(sumfile, 'Of %d valid transactions:-\n' % (nvalid))
record(sumfile, '%10d %-15s (%2.3f%%)' % (unfin, 'Incomplete', (unfin*100.0)/nvalid))
record(sumfile, '%10d %-15s (%2.3f%%)' % (non_200, 'Non 200 (OK) return codes', (non_200*100.0)/nvalid))
ngets = totrans - not_gets
record(sumfile, '%10d %-15s (%2.3f%%)' % (ngets, 'GET requests', (ngets*100.0)/nvalid))
record(sumfile, '\n================================================================\n\n')
do_output(filepath, sumfile, obs, methods, agents, filetypes, nvalid, non_200, unfin, not_gets, save_agents, modeflag, showflag, transperconn, npers, npers1)
if very_large_obs:
    record(sumfile, '\n %d Very large objects (> %d bytes):-\n' % (len(very_large_obs), VERY_LARGE_OB_THRESH))
    print '(Listed in Summary)'
    write_file(sumfile, '%s%s%s%s\n' % (center('File', 20), center('Conn/trans', 15), center('Ob-type', 30), center('Size', 10)))
    for detail in very_large_obs:
        write_file(sumfile, '%-20s%11d/%-3d%s%10d' % detail)
print '\nAnalysis results written to ', filepath
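
Each distribution in the listing above is emitted three times: a raw histogram (.hist), per-bin probabilities (.pdf) and a running cumulative total (.cdf), written to three parallel files. The sketch below restates that convention in a minimal, self-contained form; the write_distribution name, the plain open() calls and the list-of-counts argument are illustrative assumptions, not helpers taken from np_rets.py.

# Illustrative sketch only: mirrors the .hist/.pdf/.cdf output convention used
# by the script, with a generic helper name and plain file handling
# (assumptions, not part of np_rets.py).
def write_distribution(counts, basename):
    # counts: list of per-bin occurrence counts, indexed by bin number
    total = float(sum(counts))
    if not total:
        return
    hist = open(basename + '.hist', 'w')
    pdf = open(basename + '.pdf', 'w')
    cdf = open(basename + '.cdf', 'w')
    accum = 0.0
    for i, n in enumerate(counts):
        if n:
            p = n / total          # probability of this bin
            accum += p             # running cumulative probability
            hist.write('%10d %10d\n' % (i, n))
            pdf.write('%10d %7.6f\n' % (i, p))
            cdf.write('%10d %7.6f\n' % (i, accum))
    for f in (hist, pdf, cdf):
        f.close()

Called as, say, write_distribution(counts, basedir + '/transperconns'), this would reproduce the three-file layout the script uses for its transactions-per-connection data.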
