// TrackerStatus.java — part of the Azureus tracker scraper implementation.
// (Excerpt: chunk 1 of 3 of a ~917-line file; the file begins before this view.)
is = new GZIPInputStream( is );
}
byte[] data = new byte[1024];
int nbRead = 0;
while (nbRead >= 0) {
try {
nbRead = is.read(data);
if (nbRead >= 0)
message.write(data, 0, nbRead);
Thread.sleep(20);
} catch (Exception e) {
// nbRead = -1;
// message = null;
// e.printStackTrace();
return;
}
}
} finally {
if (is != null) {
try {
is.close();
} catch (IOException e1) { }
}
}
}
/**
 * Performs a scrape of the given tracker over the UDP tracker protocol and
 * writes the result, bencoded, into {@code message}.
 *
 * The exchange is: connect request -> connection id -> scrape request.
 * Timeouts are retried up to {@code PRUDPPacket.DEFAULT_RETRY_COUNT} times;
 * any other handler error is propagated to the caller.
 *
 * @param reqUrl  tracker URL (adjusted for hosting before use)
 * @param message receives the bencoded scrape result, or a bencoded
 *                "failure reason" map if all attempts failed
 * @param hash    torrent hash to scrape
 * @throws Exception on non-timeout protocol/handler failures
 */
protected void
scrapeUDP(
    URL                     reqUrl,
    ByteArrayOutputStream   message,
    byte[]                  hash )
    throws Exception
{
    /* Reduce network traffic by only scraping over UDP when the torrent isn't
     * running, as UDP protocol version 2 already includes scrape data in the
     * announce responses.
     */
    if ( PRUDPPacket.VERSION == 2 &&
         scraper.isTorrentDownloading( hash )){
        LGLogger.log( componentID, evtLifeCycle, LGLogger.SENT,
            "Scrape of " + reqUrl + " skipped as torrent running and therefore scrape data available in announce replies");
        return;
    }
    reqUrl = TRTrackerUtils.adjustURLForHosting( reqUrl );
    PasswordAuthentication  auth    = null;
    boolean                 auth_ok = false;
    try{
        // FIX: URL.getQuery() returns null when the URL has no query part;
        // the original dereferenced it unconditionally and could NPE here.
        String query = reqUrl.getQuery();
        if ( query != null && query.toLowerCase().indexOf("auth") != -1 ){
            auth = SESecurityManager.getPasswordAuthentication( "UDP Tracker", reqUrl );
        }
        int port = COConfigurationManager.getIntParameter("TCP.Listen.Port", 6881);
        PRUDPPacketHandler handler = PRUDPPacketHandlerFactory.getHandler( port );
        InetSocketAddress destination =
            new InetSocketAddress( reqUrl.getHost(), reqUrl.getPort()==-1?80:reqUrl.getPort());
        String failure_reason = null;
        for (int retry_loop=0;retry_loop<PRUDPPacket.DEFAULT_RETRY_COUNT;retry_loop++){
            try{
                // Step 1: obtain a connection id from the tracker.
                PRUDPPacket connect_request = new PRUDPPacketRequestConnect();
                PRUDPPacket reply = handler.sendAndReceive( auth, connect_request, destination );
                if ( reply.getAction() != PRUDPPacket.ACT_REPLY_CONNECT ){
                    // Tracker rejected the connect; record the reason and give up.
                    failure_reason = ((PRUDPPacketReplyError)reply).getMessage();
                    LGLogger.log(componentID, evtErrors, LGLogger.ERROR,
                        "Response from scrape interface : " + failure_reason );
                    break;
                }
                PRUDPPacketReplyConnect connect_reply = (PRUDPPacketReplyConnect)reply;
                long my_connection = connect_reply.getConnectionId();
                // Step 2: issue the scrape using the connection id.
                PRUDPPacketRequestScrape scrape_request =
                    new PRUDPPacketRequestScrape( my_connection, hash );
                reply = handler.sendAndReceive( auth, scrape_request, destination );
                if ( reply.getAction() != PRUDPPacket.ACT_REPLY_SCRAPE ){
                    // Tracker rejected the scrape; record the reason and give up.
                    failure_reason = ((PRUDPPacketReplyError)reply).getMessage();
                    LGLogger.log(componentID, evtErrors, LGLogger.ERROR,
                        "Response from scrape interface : " + failure_reason );
                    break;
                }
                auth_ok = true;
                if ( PRUDPPacket.VERSION == 1 ){
                    // V1 replies may carry results for several hashes.
                    PRUDPPacketReplyScrape scrape_reply = (PRUDPPacketReplyScrape)reply;
                    Map map = new HashMap();
                    byte[][]  hashes     = scrape_reply.getHashes();
                    int[]     complete   = scrape_reply.getComplete();
                    int[]     downloaded = scrape_reply.getDownloaded();
                    int[]     incomplete = scrape_reply.getIncomplete();
                    Map files = new ByteEncodedKeyHashMap();
                    map.put( "files", files );
                    for (int i=0;i<hashes.length;i++){
                        Map file = new HashMap();
                        files.put( new String( hashes[i], Constants.BYTE_ENCODING ), file );
                        file.put( "complete", new Long(complete[i]));
                        file.put( "downloaded", new Long(downloaded[i]));
                        file.put( "incomplete", new Long(incomplete[i]));
                    }
                    message.write( BEncoder.encode( map ));
                    return;
                }else{
                    // V2 replies carry a single result for the requested hash.
                    PRUDPPacketReplyScrape2 scrape_reply = (PRUDPPacketReplyScrape2)reply;
                    Map map = new HashMap();
                    int[] complete   = scrape_reply.getComplete();
                    int[] downloaded = scrape_reply.getDownloaded();
                    int[] incomplete = scrape_reply.getIncomplete();
                    Map files = new ByteEncodedKeyHashMap();
                    map.put( "files", files );
                    Map file = new HashMap();
                    files.put( new String( hash, Constants.BYTE_ENCODING ), file );
                    file.put( "complete", new Long(complete[0]));
                    file.put( "downloaded", new Long(downloaded[0]));
                    file.put( "incomplete", new Long(incomplete[0]));
                    message.write( BEncoder.encode( map ));
                    return;
                }
            }catch( PRUDPPacketHandlerException e ){
                // Only timeouts are retried; anything else is a real error.
                if ( e.getMessage() == null || e.getMessage().indexOf("timed out") == -1 ){
                    throw( e );
                }
                failure_reason = "Timeout";
            }
        }
        if ( failure_reason != null ){
            // All attempts failed: report the reason as a bencoded error map.
            Map map = new HashMap();
            map.put( "failure reason", failure_reason.getBytes());
            message.write( BEncoder.encode( map ));
        }
    }finally{
        // Feed the auth outcome back so cached credentials get kept/dropped.
        if ( auth != null ){
            SESecurityManager.setPasswordAuthenticationOutcome(
                TRTrackerClientClassicImpl.UDP_REALM, reqUrl, auth_ok );
        }
    }
}
/**
 * Extracts the value of a named query-string parameter from a URL string.
 *
 * @param url   the URL (or query string) to search
 * @param param the parameter name, without the trailing '='
 * @return the raw (undecoded) value, or null if the parameter is absent
 */
protected String
getURLParam(
    String url,
    String param )
{
    String token = param + "=";
    // FIX: a bare indexOf can match the tail of a longer parameter name
    // (searching for "id" would match inside "uid=..."), returning the wrong
    // value. Only accept a match at the start of the string or directly
    // after a '?' or '&' separator.
    int p1 = url.indexOf( token );
    while ( p1 > 0 ){
        char prev = url.charAt( p1-1 );
        if ( prev == '&' || prev == '?' ){
            break;
        }
        p1 = url.indexOf( token, p1+1 );
    }
    if ( p1 == -1 ){
        return( null );
    }
    int p2 = url.indexOf( "&", p1 );
    if ( p2 == -1 ){
        // Parameter is the last one: value runs to the end of the string.
        return( url.substring( p1 + token.length()));
    }
    return( url.substring( p1 + token.length(), p2 ));
}
/**
 * Registers a new torrent hash with this tracker status, creating its scrape
 * response entry and notifying listeners of the (as yet empty) response.
 *
 * @param hash torrent hash to track
 * @return the newly created scrape response object
 */
protected TRTrackerScraperResponseImpl addHash(byte[] hash) {
    TRTrackerScraperResponseImpl response = new TRTrackerScraperResponseImpl(this, hash);
    // Without a scrape URL this tracker can never be scraped, so the entry
    // goes straight to a permanent error; otherwise it starts initialising.
    if (scrapeURL != null) {
        response.setStatus(TRTrackerScraperResponse.ST_INITIALIZING,
                MessageText.getString("Scrape.status.initializing"));
    } else {
        response.setStatus(TRTrackerScraperResponse.ST_ERROR,
                MessageText.getString("Scrape.status.error") +
                MessageText.getString("Scrape.status.error.badURL"));
    }
    try {
        hashes_mon.enter();
        // NOTE(review): key appears to be a raw byte[] — confirm the map
        // implementation compares keys by content rather than identity.
        hashes.put(hash, response);
    } finally {
        hashes_mon.exit();
    }
    // Notify listeners of the new response.
    scraper.scrapeReceived(response);
    return response;
}
/**
 * Removes the scrape response entry for the given torrent hash, if any.
 *
 * @param hash_wrapper wrapper around the torrent hash to drop
 */
protected void removeHash(HashWrapper hash_wrapper) {
    try {
        hashes_mon.enter();
        hashes.remove(hash_wrapper.getHash());
    } finally {
        // The monitor must be released even if the removal throws.
        hashes_mon.exit();
    }
}
/**
 * @return the URL of the tracker this status object represents
 */
protected URL getTrackerURL() {
    return tracker_url;
}
/**
 * @return the live internal hash -> scrape-response map (not a copy);
 *         callers should hold the hashes monitor while iterating it
 */
protected Map getHashes() {
    return( hashes );
}
/**
 * @return the monitor that guards access to the hashes map
 */
protected AEMonitor getHashesMonitor() {
    return hashes_mon;
}
/**
 * Forwards a completed scrape response to the owning scraper so it can
 * notify its registered listeners.
 *
 * @param response the scrape response that was received
 */
protected void scrapeReceived(TRTrackerScraperResponse response) {
    scraper.scrapeReceived( response );
}
}