
📄 np_treestats.py

📁 This software generates NetFlow records from network traffic data. NetFlow records can be used for network planning, load balancing, security monitoring, and similar tasks.
        def by_nobs(a, b):
            return b[1][0] - a[1][0]

        def by_1(a, b):
            return b[1] - a[1]

        def by_bytes(a, b):
            return b[1][1] - a[1][1]

        def by_nconns(a, b):
            return len(b[1][2]) - len(a[1][2])

        def by_id(a, b):
            return a.id - b.id

        def attach_uconns():
            #
            # Find any transaction-less connections belonging to this tree
            # - won't be linked in as no transaction
            # - assume belong if connid is between first and last or last+1
            #
            gotone = 0
            first = connsf[0].id
            last = connsf[-1].id
            for c in uconns: # small list so linear scan
                id = c.id
                if first < id < last or id == last+1:
                    uconns.remove(c)
                    connsf.append(c)
                    cdict[id] = c
                    last = max(last, id)
                    gotone += 1
                    c.intree = 1
                #elif c > last:
                    #break
            if gotone:
                connsf.sort(by_id)

        def write_linkdata(t):
            ld = {} # dictionary of link types
            #ndlinks = nalinks = nduplinks = 0
            v = t.ob.ldict.values()
            nlinks = len(v) # no of discrete URL links of all types derived from all sights of this object for this client
            #for type, links in v:
            for ent in v:
                type = ent[0]
                links = ent[1]
                e = ld.setdefault(type, [0, 0, 0, 0])
                e[0] += 1 # no of discrete URL links of this type derived from all sights of object
                got = 0
                for l in links:
                    if l.trans == t:
                        got = 1
                        break
                if got:
                    #ndlinks += 1
                    e[1] += 1 # no of discrete URL links of this type in this instance of the object
                    if l.target:
                        e[2] += 1 # ditto followed
                    #nduplinks += len(l.subs)
                    e[3] += len(l.subs) # no of duplicated links for this type in this instance
            #print nlinks, ndlinks, nduplinks, ld
            tl = ld.items()
            write(' %d %d' % (nlinks, len(tl)))
            for t, d in tl:
                write(' %d %d %d %d %d' % (t, d[0], d[1], d[2], d[3]))

        #
        # Main fn starts here
        #
        A_INTREE = 0x1
        A_UNLINKED = 0x2
        A_REFRESH = 0x4
        A_INVAL = 0x8
        A_REVISIT = 0x10
        trace = 1 and self.trace
        #trace = 1
        if trace:
            print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
            print 'Suck_tree for %s %s' % (client.Class, client.addr_str())
        spd = self.spdict
        cid = client.id
        self.nob += len(client.translist)
        write = self.writepage
        # tmp
        tdict = {}
        for t in client.translist:
            tdict[t] = 0
        # end tmp
        td = client.find_trees2()
        # iterate over trees - g is generator for iteration over transactions
        for g, url, rsid, rconnid, order, start, ua, sa, ltype, ctype in td:
            #tid = (url, rconnid)
            tlist = []
            sdict = {}
            cdict = {}
            nob = 0
            bytes = 0
            persist = 0
            dflags = 0
            if trace:
                print 'tree root', url
                if start != None:
                    print 'start', start/1000
            # iterate over transactions
            for t in g:
                connid = t.connid
                tc = t.TConn
                if connid < 0:
                    # dummy - don't count
                    continue
                tdict[t] |= A_INTREE # tmp
                tc.intree = 1
                tc.pageno = self.npages
                t.intree = 1
                if start == None:
                    # root was a dummy - use first real
                    if t.connorder == 0:
                        start = tc.open
                    else:
                        start = t.reqstart
                    rsid = tc.server
                    ua = t.uagent
                    sa = t.server
                    if trace:
                        print 'start', start/1000
                tlist.append(t)
                nb = t.nbytes
                nob += 1
                bytes += nb
                cdict[connid] = tc
                sid = t.servaddr
                sdict[sid] = t.server
                spd[sid] = None
                persist |= tc.persist
                if tc.delays:
                    dflags |= tc.delays[-1]
            if nob == 0:
                continue
            if nob > self.nob_thresh:
                self.npagesb += 1
            #conns = cdict.keys()
            connsf = cdict.values()
            connsf.sort(by_id)
            if uconns:
                attach_uconns()
            tdel = self.remdels(connsf)
            conns = cdict.keys()
            #print conns
            for c in conns:
                cdict[c] = 1
            ends = []
            nends = []
            acc_del = 0
            delv = 0
            for t in tlist:
                tend = t.repend/1000
                ends.append(tend)
                dsave = t.TConn.dsave
                nends.append(tend-dsave)
                acc_del += dsave
                delv += dsave*dsave
            ends.sort()
            end = ends[-1]
            if nob > self.nob_thresh:
                self.nlpages += 1
                #end85 = ends[int((nob*0.85)-0.5)]
            else:
                #end85 = end
                self.nspages += 1
            end85 = ends[int((nob*0.85)-0.5)]
            if tdel:
                self.npagesd += 1
                nends.sort()
                nend = nends[-1]
                if nob > self.nob_thresh:
                    self.npagesdb += 1
                    #nend85 = nends[int((nob*0.85)-0.5)]
                #else:
                    #nend85 = nend
                nend85 = nends[int((nob*0.85)-0.5)]
            else:
                nend = end
                nend85 = end85
            start /= 1000
            #print 'start', start, 'end', end
            tdur = int(end-start)
            tdur85 = int(end85-start)
            ndur = int(nend-start)
            ndur85 = int(nend85-start)
            dsave = ndur-tdur
            dtm = (start/1000.0, tdur, ndur, tdur85, ndur85, dflags, acc_del,
                   delv, tlist[0].TConn.dsave)
            if ndur - tdur > 500 or ndur85 < 0 or ndur < 0:
                str = 'WebClient #%s delay calc bad: %d/%d %d->%d %d->%d %s %d' % \
                      (client.addr_str(), dsave, tdel, tdur, ndur,
                       tdur85, ndur85, url, rconnid)
                self.write_log(str)
                self.write_log(str[3:]+'         ')
                whoops(str)
                print 'start', start
                print
                self.trace = 1
                self.remdels(connsf)
                self.trace = 0
                #raw_input('...')
            if trace:
                print 'end', end/1000, end%1000
                print conns
                #raw_input('...')
            self.reg_b(ua, UA)
            self.reg_b(sa, SA)
            nservs = len(sdict)
            if nservs > 1:
                #note any associated servers for the page
                asd = self.serv_assoc
                for s in sdict.items():
                    if s[0] != rsid:
                        try:
                            asd[rsid][s[0]] = 1
                        except KeyError:
                            asd[rsid] = {s[0]: 1}
                            str = 'WebClient #%s Server-assoc: %s->%s' % (client.addr_str(), intoa_string(rsid), intoa_string(s[0]))
                            self.write_log(str +'         ')
                            inform(str)
                        self.reg_b(s[1], SA)
            self.ppers += persist
            #url = url.replace(' ', '\32')
            #ltype = (ctype << 16) | ltype
            write('P %d %s %s %s %d %d %d 0x%x %d %d ' % (self.npages, url,
                          intoa_string(client.id), intoa_string(rsid),
                          bytes, nob, len(conns), ltype, ctype, nservs))
            write('%.3f ' % (dtm[0]))
            for v in dtm[1:]:
                write('%d ' % (v))
            write('\n')
            if not self.quiet:
                self.pagelist.append((url, rconnid, order, client.id, rsid,
                                      bytes, nob, dtm, conns, ltype))
            self.npages += 1
            if self.robs:
                tn = 0
                for t in tlist:
                    fng = t.finger
                    if t.connorder == 0:
                        ostart = t.TConn.open/1000000.0
                    else:
                        ostart = t.reqstart/1000000.0
                    if t.reflink:
                        olink_type = t.reflink.type
                    else:
                        olink_type = 0
                    write('T %d %s %s %d %d 0x%x %d %d %f %d %d %d %d' % (tn, t.absurl, intoa_string(t.servaddr), t.connorder, t.nbytes, olink_type, t.sobtype, t.retcode, ostart, fng[0], fng[1], fng[2], t.iscontainer))
                    tn += 1
                    if t.iscontainer:
                        write_linkdata(t)
                    write('\n')
                write('##\n')
        #for pre, l in [['U', client.unlinkedlist], ['R', client.refreshlist],
                       #['I', client.invalidlist], ['V', client.revisitlist]]:
        for pre, l, key in [['U', client.unlinkedlist, A_UNLINKED], ['R', client.refreshlist, A_REFRESH], ['I', client.invalidlist, A_INVAL], ['V', client.revisitlist, A_REVISIT]]:
            for t in l:
                tdict[t] |= key # tmp
                write('%s %s %s %s %d %d ' % (pre, t.absurl,
                      intoa_string(client.id), intoa_string(t.servaddr),
                      t.nbytes, t.sobtype))
                tc = t.TConn
                d = tc.get_delays(1)
                if d and d[0] & D_BIGDEL:
                    dr = reduce(add, d[1:])
                    df = d[0]
                else:
                    dr = 0
                    df = 0
                dur = tc.close-tc.open
                write('%.3f %d %d %d\n' % (tc.open/1000000.0, dur/1000,
                                           (dur-dr)/1000, df))
        write('##\n##\n')
        # tmp
        for t, v in tdict.items():
            if not v:
                str = 'WebClient #%s unaccounted ob: conn %d' % \
                      (client.addr_str(), t.connid)
                self.write_log(str)
                whoops(str)
            ct = 0
            while v:
                if v & 0x1:
                    ct += 1
                v = v >> 1
            if ct > 1:
                str = 'WebClient #%s multiply accounted ob: conn %d refs %d' % \
                      (client.addr_str(), t.connid, v)
                self.write_log(str)
                whoops(str)

############################################################################

    def get_data(self, client):
        trace = 1 and self.trace
        try:
            uconns, dflags = self.conn_dels(client)
            if self.del_only and not dflags:
                return
            nuconns = len(uconns)
            self.uconns += 1
            self.suck_tree(client, uconns)
            nattached = nuconns - len(uconns)
            self.uconns_inc += nattached
            if nuconns:
                if nattached:
                    str = 'WebClient #%s %d/%d Uconns incorporated:' % (client.addr_str(), nattached, nuconns)
                    self.write_log(str)
                    if trace:
                        inform(str)
                else:
                    str = 'WebClient #%s 0/%d Uconns incorporated:' % (client.addr_str(), nuconns)
                    self.write_log(str)
                    if trace:
                        inform(str)
            self.nclients_constructed += 1
            #
            # save connection data
            #
            write = self.writeconn
            for c in client.connlist:
                write('%s %d ' % (c.id, c.pageno))
                dly = c.dly
                write('%f %d %d %d %d %d' % (dly[0], dly[1], dly[2], dly[3],
                                             c.intree, c.persist))
                if len(dly) == 5:
                    for v in dly[4]:
                        write(' %d' % (v))
                write('\n')
                if not self.quiet:
                    self.cdelays[c.id] = [c.dly, c.intree, c.persist]
                if not c.intree:
                    self.c_uninc += 1
                    if c.server not in self.spdict:
                        self.uservd[c.server] = None
            for t in client.translist:
                if not t.intree:
                    self.ob_uninc += 1
        except:
            str = 'WebClient #%s get_data failure:' % (client.addr_str())
            whoops(str)
            self.write_log(str)
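The per-page summary lines written above start with 'P', followed by the page number, root URL, client and server addresses, byte and object counts, connection count, link type, content type, server count, and the timing tuple dtm. The following is a minimal sketch, not part of np_treestats.py, of how such a record could be read back: the field order is inferred from the write('P ...') calls above, the file name 'pages.txt' is a hypothetical placeholder, and it assumes the URL field contains no embedded spaces (the commented-out url.replace(' ', '\32') suggests that is not always guaranteed).

# Sketch of a 'P' record reader (Python 3).  Field layout inferred from the
# write('P ...') calls in suck_tree; 'pages.txt' is a hypothetical file name.
def parse_page_record(line):
    f = line.split()
    if not f or f[0] != 'P':
        return None
    return {
        'pageno':  int(f[1]),
        'url':     f[2],           # assumed to contain no spaces
        'client':  f[3],           # dotted-quad from intoa_string()
        'server':  f[4],
        'bytes':   int(f[5]),
        'nob':     int(f[6]),      # objects in the page tree
        'nconns':  int(f[7]),      # connections used by the page
        'ltype':   int(f[8], 16),  # written as 0x%x
        'ctype':   int(f[9]),
        'nservs':  int(f[10]),
        'start':   float(f[11]),   # page start time in seconds (%.3f)
        # dtm[1:] in order: tdur, ndur, tdur85, ndur85, dflags,
        # acc_del, delv, root connection dsave
        'timings': [int(x) for x in f[12:20]],
    }

if __name__ == '__main__':
    with open('pages.txt') as fp:   # hypothetical page-record file
        for line in fp:
            rec = parse_page_record(line)
            if rec:
                print(rec['url'], rec['bytes'], rec['timings'])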
