📄 download.py
# Written by Bram Cohen
# see LICENSE.txt for license information

from zurllib import urlopen
from urlparse import urljoin
from btformats import check_message
from Choker import Choker
from Storage import Storage
from StorageWrapper import StorageWrapper
from Uploader import Upload
from Downloader import Downloader
from HTTPDownloader import HTTPDownloader
from Connecter import Connecter
from Encrypter import Encoder
from RawServer import RawServer
from Rerequester import Rerequester
from DownloaderFeedback import DownloaderFeedback
from RateMeasure import RateMeasure
from CurrentRateMeasure import Measure
from PiecePicker import PiecePicker
from Statistics import Statistics
from bencode import bencode, bdecode
from sha import sha
from os import path, makedirs, listdir
from parseargs import parseargs, formatDefinitions, defaultargs
from socket import error as socketerror
from random import seed
from traceback import print_exc
from threading import Thread, Event, Semaphore
from time import time
from __init__ import version
try:
    from os import getpid
except ImportError:
    def getpid():
        return 1

true = 1
false = 0

defaults = [
    ('max_uploads', 7,
        "the maximum number of uploads to allow at once."),
    ('keepalive_interval', 120.0,
        'number of seconds to pause between sending keepalives'),
    ('download_slice_size', 2 ** 14,
        "How many bytes to query for per request."),
    ('request_backlog', 5,
        "how many requests to keep in a single pipe at once."),
    ('max_message_length', 2 ** 23,
        "maximum length prefix encoding you'll accept over the wire - larger values get the connection dropped."),
    ('ip', '',
        "ip to report you have to the tracker."),
    ('minport', 6881,
        'minimum port to listen on, counts up if unavailable'),
    ('maxport', 6999,
        'maximum port to listen on'),
    ('responsefile', '',
        'file the server response was stored in, alternative to url'),
    ('url', '',
        'url to get file from, alternative to responsefile'),
    ('saveas', '',
        'local file name to save the file as, null indicates query user'),
    ('timeout', 300.0,
        'time to wait between closing sockets which nothing has been received on'),
    ('timeout_check_interval', 60.0,
        'time to wait between checking if any connections have timed out'),
    ('max_slice_length', 2 ** 17,
        "maximum length slice to send to peers, larger requests are ignored"),
    ('max_rate_period', 20.0,
        "maximum amount of time to guess the current rate estimate represents"),
    ('bind', '',
        'ip to bind to locally'),
    ('upload_rate_fudge', 5.0,
        'time equivalent of writing to kernel-level TCP buffer, for rate adjustment'),
    ('display_interval', .5,
        'time between updates of displayed information'),
    ('rerequest_interval', 5 * 60,
        'time to wait between requesting more peers'),
    ('min_peers', 20,
        'minimum number of peers to not do rerequesting'),
    ('http_timeout', 60,
        'number of seconds to wait before assuming that an http connection has timed out'),
    ('max_initiate', 40,
        'number of peers at which to stop initiating new connections'),
    ('check_hashes', 1,
        'whether to check hashes on disk'),
    ('max_upload_rate', 0,
        'maximum kB/s to upload at, 0 means no limit'),
    ('alloc_type', 'normal',
        'allocation type (may be normal, background, pre-allocate or sparse)'),
    ('alloc_rate', 2.0,
        'rate (in MiB/s) to allocate space at using background allocation'),
    ('snub_time', 30.0,
        "seconds to wait for data to come in over a connection before assuming it's semi-permanently choked"),
    ('spew', 1,
        "whether to display diagnostic info to stdout"),
    ('rarest_first_cutoff', 2,
        "number of downloads at which to switch from random to rarest first"),
    ('rarest_first_priority_cutoff', 3,
        'the number of peers which need to have a piece before other partials take priority over rarest first'),
    ('min_uploads', 4,
        "the number of uploads to fill out to with extra optimistic unchokes"),
    ('max_files_open', 50,
        'the maximum number of files to keep open at a time, 0 means no limit'),
    ('round_robin_period', 30,
        "the number of seconds between the client's switching upload targets"),
    ('super_seeder', 0,
        "whether to use special upload-efficiency-maximizing routines (only for dedicated seeds)"),
    ('security', 1,
        "whether to enable extra security features intended to prevent abuse"),
    ('max_connections', 0,
        "the absolute maximum number of peers to connect with (0 = no limit)"),
    ]

def download(*argv, **kws):
    d = Download()
    d.download(*argv, **kws)

class Download:
    def __init__(self):
        self.defaultargs = defaultargs(defaults)
        self.argslistheader = 'arguments are:\n'
        self.unpauseflag = Event()
        self.unpauseflag.set()
        self.storagewrapper = None
        self.super_seeding_active = false

    def download(self, params, filefunc, statusfunc, finfunc, errorfunc,
                 doneflag, cols, pathFunc = None, presets = {},
                 exchandler = None, sem = None):
        self.errorfunc = errorfunc
        self.finfunc = finfunc

        def failed(reason, errorfunc = errorfunc, doneflag = doneflag):
            doneflag.set()
            if reason is not None:
                errorfunc(reason)

        if len(params) == 0:
            failed(self.argslistheader + formatDefinitions(defaults, cols))
            return
        if len(params) == 1:
            params = ['--responsefile'] + params
        try:
            config, args = parseargs(params, defaults, 0, 1, presets = presets)
            if args:
                if config.get('responsefile', None) == None:
                    raise ValueError, 'must have responsefile as arg or parameter, not both'
                if path.isfile(args[0]):
                    config['responsefile'] = args[0]
                else:
                    config['url'] = args[0]
            if (config['responsefile'] == '') == (config['url'] == ''):
                raise ValueError, 'need responsefile or url'
            config['max_upload_rate'] *= 1000
            self.config = config
        except ValueError, e:
            failed('error: ' + str(e) + '\nrun with no args for parameter explanations')
            return

        try:
            if config['responsefile'] != '':
                h = open(config['responsefile'], 'rb')
                try:
                    line = h.read(10)  # quick test to see if responsefile contains a dict
                    front, garbage = line.split(':', 1)
                    assert front[0] == 'd'
                    n = int(front[1:])
                except:
                    failed(config['responsefile'] + ' is not a valid responsefile')
                    return
                h.seek(0)
            else:
                h = urlopen(config['url'])
            response = h.read()
            h.close()
        except IOError, e:
            failed('problem getting response info - ' + str(e))
            return
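        # Background on the quick test above: bencode writes a dict as
        # 'd' + entries + 'e', with strings length-prefixed as
        # '<len>:<bytes>', so a metainfo file begins with something like
        # 'd8:announce'.  Reading ten bytes, splitting on ':' and checking
        # for a leading 'd' followed by an integer therefore cheaply
        # rejects files that cannot be bencoded dicts.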
        try:
            response = bdecode(response)
            check_message(response)
        except ValueError, e:
            failed("got bad file info - " + str(e))
            return
        self.response = response

        try:
            def make(f, forcedir = false):
                if not forcedir:
                    f = path.split(f)[0]
                if f != '' and not path.exists(f):
                    makedirs(f)

            info = response['info']
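            # The info dict comes in two shapes: a single-file torrent
            # stores 'length' directly, while a multi-file torrent stores
            # 'files', a list of dicts each carrying its own 'length' and
            # 'path' (a list of path components below 'name'), roughly:
            #   {'name': 'album', 'piece length': ..., 'pieces': ...,
            #    'files': [{'length': 100, 'path': ['cd1', '01.mp3']}]}
            # The branch below sizes and places the download accordingly.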
            if info.has_key('length'):
                file_length = info['length']
                file = filefunc(info['name'], file_length, config['saveas'], false)
                if file is None:
                    return
                make(file)
                files = [(file, file_length)]
            else:
                file_length = 0
                for x in info['files']:
                    file_length += x['length']
                file = filefunc(info['name'], file_length, config['saveas'], true)
                if file is None:
                    return

                # if this path exists, and no files from the info dict
                # exist, we assume it's a new download and the user wants
                # to create a new directory with the default name
                existing = 0
                if path.exists(file):
                    if not path.isdir(file):
                        failed(file + ' is not a dir')
                        return
                    if len(listdir(file)) > 0:  # if it's not empty
                        for x in info['files']:
                            if path.exists(path.join(file, x['path'][0])):
                                existing = 1
                        if not existing:
                            file = path.join(file, info['name'])
                            if path.exists(file) and not path.isdir(file):
                                if file[-8:] == '.torrent':
                                    file = file[:-8]
                                if path.exists(file) and not path.isdir(file):
                                    failed("Can't create dir - " + info['name'])
                                    return
                make(file, true)

                # alert the UI to any possible change in path
                if pathFunc != None:
                    pathFunc(file)

                files = []
                for x in info['files']:
                    n = file
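# ---------------------------------------------------------------------------
# A minimal sketch, not part of this module, of how a headless front end
# might drive download().  The callback names are hypothetical, and the
# keyword arguments the status callback receives (fractionDone, downRate,
# and so on) are assumptions based on DownloaderFeedback, not verified here.

def demo_filefunc(default, length, saveas, isdir):
    # accept the torrent's suggested name unless --saveas was given
    return saveas or default

def demo_statusfunc(**kwargs):
    print kwargs  # e.g. fractionDone, downRate, upRate (assumed keys)

def demo_finfunc():
    print 'download complete'

def demo_errorfunc(msg):
    print 'error:', msg

if __name__ == '__main__':
    # Event and download are defined above; 80 is the terminal column
    # width (cols) used when formatting the parameter help text.
    doneflag = Event()
    download(['--responsefile', 'example.torrent'], demo_filefunc,
             demo_statusfunc, demo_finfunc, demo_errorfunc, doneflag, 80)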