📄 track.py
                return (302, 'Found', {'Content-Type': 'text/html', 'Location': red},
                        '<A HREF="'+red+'">Click Here</A>')

            s = StringIO()
            s.write('<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">\n' \
                '<html><head><title>BitTorrent download info</title>\n')
            if self.favicon is not None:
                s.write('<link rel="shortcut icon" href="/favicon.ico">\n')
            s.write('</head>\n<body>\n' \
                '<h3>BitTorrent download info</h3>\n' \
                '<ul>\n'
                '<li><strong>tracker version:</strong> %s</li>\n' \
                '<li><strong>server time:</strong> %s</li>\n' \
                '</ul>\n' % (version, isotime()))
            if self.allowed is not None:
                if self.show_names:
                    names = [ (value['name'], infohash)
                              for infohash, value in self.allowed.iteritems() ]
                else:
                    names = [ (None, infohash) for infohash in self.allowed ]
            else:
                names = [ (None, infohash) for infohash in self.downloads ]
            if not names:
                s.write('<p>not tracking any files yet...</p>\n')
            else:
                names.sort()
                tn = 0
                tc = 0
                td = 0
                tt = 0   # Total transferred
                ts = 0   # Total size
                nf = 0   # Number of files displayed
                if self.allowed is not None and self.show_names:
                    s.write('<table summary="files" border="1">\n' \
                        '<tr><th>info hash</th><th>torrent name</th><th align="right">size</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th><th align="right">transferred</th></tr>\n')
                else:
                    s.write('<table summary="files">\n' \
                        '<tr><th>info hash</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th></tr>\n')
                for name, infohash in names:
                    l = self.downloads[infohash]
                    n = self.completed.get(infohash, 0)
                    tn = tn + n
                    c = self.seedcount[infohash]
                    tc = tc + c
                    d = len(l) - c
                    td = td + d
                    nf = nf + 1
                    if self.allowed is not None and self.show_names:
                        if self.allowed.has_key(infohash):
                            sz = self.allowed[infohash]['length']  # size
                            ts = ts + sz
                            szt = sz * n   # Transferred for this torrent
                            tt = tt + szt
                            if self.allow_get == 1:
                                linkname = '<a href="/file?info_hash=' + quote(infohash) + '">' + name + '</a>'
                            else:
                                linkname = name
                            s.write('<tr><td><code>%s</code></td><td>%s</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i</td><td align="right">%s</td></tr>\n' \
                                % (b2a_hex(infohash), linkname, size_format(sz), c, d, n, size_format(szt)))
                    else:
                        s.write('<tr><td><code>%s</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td></tr>\n' \
                            % (b2a_hex(infohash), c, d, n))
                ttn = 0
                for i in self.completed.itervalues():
                    ttn = ttn + i
                if self.allowed is not None and self.show_names:
                    s.write('<tr><td align="right" colspan="2">%i files</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i/%i</td><td align="right">%s</td></tr>\n'
                            % (nf, size_format(ts), tc, td, tn, ttn, size_format(tt)))
                else:
                    s.write('<tr><td align="right">%i files</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i/%i</td></tr>\n'
                            % (nf, tc, td, tn, ttn))
                s.write('</table>\n' \
                    '<ul>\n' \
                    '<li><em>info hash:</em> SHA1 hash of the "info" section of the metainfo (*.torrent)</li>\n' \
                    '<li><em>complete:</em> number of connected clients with the complete file</li>\n' \
                    '<li><em>downloading:</em> number of connected clients still downloading</li>\n' \
                    '<li><em>downloaded:</em> reported complete downloads (total: current/all)</li>\n' \
                    '<li><em>transferred:</em> torrent size * total downloaded (does not include partial transfers)</li>\n' \
                    '</ul>\n')
            s.write('</body>\n' \
                '</html>\n')
            return (200, 'OK', {'Content-Type': 'text/html; charset=iso-8859-1'}, s.getvalue())
        except:
            print_exc()
            return (500, 'Internal Server Error',
                    {'Content-Type': 'text/html; charset=iso-8859-1'}, 'Server Error')

    # Build the per-torrent statistics dictionary used in scrape replies.
    def scrapedata(self, infohash, return_name = True):
        l = self.downloads[infohash]
        n = self.completed.get(infohash, 0)
        c = self.seedcount[infohash]
        d = len(l) - c
        f = {'complete': c, 'incomplete': d, 'downloaded': n}
        if return_name and self.show_names and self.allowed is not None:
            f['name'] = self.allowed[infohash]['name']
        return (f)

    # Handle a /scrape request, subject to the scrape_allowed setting.
    def get_scrape(self, paramslist):
        fs = {}
        if paramslist.has_key('info_hash'):
            if self.config['scrape_allowed'] not in ['specific', 'full']:
                return (400, 'Not Authorized', default_headers,
                    bencode({'failure reason':
                    "specific scrape function is not available with this tracker."}))
            for infohash in paramslist['info_hash']:
                if self.allowed is not None and infohash not in self.allowed:
                    continue
                if infohash in self.downloads:
                    fs[infohash] = self.scrapedata(infohash)
        else:
            if self.config['scrape_allowed'] != 'full':
                return (400, 'Not Authorized', default_headers,
                    bencode({'failure reason':
                    "full scrape function is not available with this tracker."}))
                    #bencode({'failure reason':
                    #_("full scrape function is not available with this tracker.")}))
            if self.allowed is not None:
                hashes = self.allowed
            else:
                hashes = self.downloads
            for infohash in hashes:
                fs[infohash] = self.scrapedata(infohash)
        return (200, 'OK', {'Content-Type': 'text/plain'}, bencode({'files': fs}))

    # Serve the stored .torrent file when allow_get is enabled.
    def get_file(self, infohash):
        if not self.allow_get:
            return (400, 'Not Authorized', default_headers,
                _("get function is not available with this tracker."))
        if not self.allowed.has_key(infohash):
            return (404, 'Not Found', default_headers, alas)
        fname = self.allowed[infohash]['file']
        fpath = self.allowed[infohash]['path']
        return (200, 'OK', {'Content-Type': 'application/x-bittorrent',
            'Content-Disposition': 'attachment; filename=' + fname},
            open(fpath, 'rb').read())

    # Reject announces for torrents missing from the allowed list.
    def check_allowed(self, infohash, paramslist):
        if self.allowed is not None:
            if not self.allowed.has_key(infohash):
                return (200, 'Not Authorized', default_headers,
                    bencode({'failure reason':
                    "Requested download is not authorized for use with this tracker."}))
                    #_("Requested download is not authorized for use with this tracker.")}))
            if self.config['allowed_controls']:
                if self.allowed[infohash].has_key('failure reason'):
                    return (200, 'Not Authorized', default_headers,
                        bencode({'failure reason':
                        self.allowed[infohash]['failure reason']}))
        return None

    # Validate an announce request and update per-peer state for this torrent.
    def add_data(self, infohash, event, ip, paramslist):
        peers = self.downloads.setdefault(infohash, {})
        ts = self.times.setdefault(infohash, {})
        self.completed.setdefault(infohash, 0)
        self.seedcount.setdefault(infohash, 0)

        def params(key, default = None, l = paramslist):
            if l.has_key(key):
                return l[key][0]
            return default

        myid = params('peer_id', '')
        if len(myid) != 20:
            raise ValueError, 'id not of length 20'
        if event not in ['started', 'completed', 'stopped', 'snooped', None]:
            raise ValueError, 'invalid event'
        port = int(params('port', ''))
        if port < 0 or port > 65535:
            raise ValueError, 'invalid port'
        left = int(params('left', ''))
        if left < 0:
            raise ValueError, 'invalid amount left'

        peer = peers.get(myid)
        mykey = params('key')
        auth = not peer or peer.get('key', -1) == mykey or peer.get('ip') == ip

        gip = params('ip')
        local_override = gip and self.allow_local_override(ip, gip)
        if local_override:
            ip1 = gip
        else:
            ip1 = ip
        if not auth and local_override and self.only_local_override_ip:
            auth = True

        if params('numwant') is not None:
            rsize = min(int(params('numwant')), self.max_give)
        else:
            rsize = self.response_size

        if event == 'stopped':
            if peer and auth:
                self.delete_peer(infohash, myid)

        elif not peer:
            ts[myid] = time()
            peer = {'ip': ip, 'port': port, 'left': left}
            if mykey:
                peer['key'] = mykey
            if gip:
                peer['given ip'] = gip
            if port:
                if not self.natcheck or (local_override and self.only_local_override_ip):
                    peer['nat'] = 0
                    self.natcheckOK(infohash, myid, ip1, port, left)
                else:
                    NatCheck(self.connectback_result, infohash, myid, ip1, port, self.rawserver)
            else:
                peer['nat'] = 2**30
            if event == 'completed':
                self.completed[infohash] += 1
            if not left:
                self.seedcount[infohash] += 1

            peers[myid] = peer

        else:
            if not auth:
                return rsize    # return w/o changing stats

            ts[myid] = time()
            if not left and peer['left']:
                self.completed[infohash] += 1
                self.seedcount[infohash] += 1
                if not peer.get('nat', -1):
                    for bc in self.becache[infohash]:
                        bc[1][myid] = bc[0][myid]
                        del bc[0][myid]
            if peer['left']:
                peer['left'] = left

            recheck = False
            if ip != peer['ip']:
                peer['ip'] = ip
                recheck = True
            if gip != peer.get('given ip'):
                if gip:
                    peer['given ip'] = gip
                elif peer.has_key('given ip'):
                    del peer['given ip']
                if local_override:
                    if self.only_local_override_ip:
                        self.natcheckOK(infohash, myid, ip1, port, left)
                    else:
                        recheck = True

            if port and self.natcheck:
                if recheck:
                    if peer.has_key('nat'):
                        if not peer['nat']:
                            l = self.becache[infohash]
                            y = not peer['left']
                            for x in l:
                                del x[y][myid]
                        del peer['nat'] # restart NAT testing
                else:
                    natted = peer.get('nat', -1)
                    if natted and natted < self.natcheck:
                        recheck = True

                if recheck:
                    NatCheck(self.connectback_result, infohash, myid, ip1, port, self.rawserver)

        return rsize

    # Assemble the announce reply; peers are drawn from shuffled, short-lived caches.
    def peerlist(self, infohash, stopped, is_seed, return_type, rsize):
        data = {}    # return data
        seeds = self.seedcount[infohash]
        data['complete'] = seeds
        data['incomplete'] = len(self.downloads[infohash]) - seeds

        if ( self.allowed is not None and self.config['allowed_controls'] and
                     self.allowed[infohash].has_key('warning message') ):
            data['warning message'] = self.allowed[infohash]['warning message']

        data['interval'] = self.reannounce_interval
        if stopped or not rsize:     # save some bandwidth
            data['peers'] = []
            return data

        bc = self.becache.setdefault(infohash, [[{}, {}], [{}, {}], [{}, {}]])
        len_l = len(bc[0][0])
        len_s = len(bc[0][1])
        if not (len_l + len_s):      # caches are empty!
            data['peers'] = []
            return data
        l_get_size = int(float(rsize) * (len_l) / (len_l + len_s))
        cache = self.cached.setdefault(infohash, [None, None, None])[return_type]
        if cache:
            if cache[0] + self.config['min_time_between_cache_refreshes'] < time():
                cache = None
            else:
                if ( (is_seed and len(cache[1]) < rsize)
                     or len(cache[1]) < l_get_size or not cache[1] ):
                    cache = None
        if not cache:
            vv = [[], [], []]
            cache = [ time(),
                      bc[return_type][0].values() + vv[return_type],
                      bc[return_type][1].values() ]
            shuffle(cache[1])
            shuffle(cache[2])
            self.cached[infohash][return_type] = cache
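For reference, the scrape reply assembled by get_scrape above is bencode({'files': fs}), where fs maps each 20-byte info hash to the counter dictionary built by scrapedata. A minimal sketch of that structure follows; the hash, counts, and name below are made up for illustration and are not part of track.py.

# Illustrative sketch only: shape of the dictionary passed to bencode() by get_scrape.
example_scrape_reply = {
    'files': {
        '\xaa' * 20: {                   # hypothetical 20-byte binary info hash
            'complete': 4,               # connected seeds
            'incomplete': 11,            # connected downloaders
            'downloaded': 57,            # completed downloads reported so far
            'name': 'Example Torrent',   # only present with show_names and an allowed list
        },
    },
}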