📄 TrackerStatus.java
if (Logger.isEnabled())
    Logger.log(new LogEvent(LOGID, 0, "redirection of http scrape [" + scrapeURL
            + "] to udp failed, will retry in " + autoUDPscrapeEvery + " scrapes"));
} else if (success && !protocol.equalsIgnoreCase("udp")) {
    if (Logger.isEnabled())
        Logger.log(new LogEvent(LOGID, 0, "redirection of http scrape [" + scrapeURL
                + "] to udp successful"));
    autoUDPscrapeEvery = 1;
    TRTrackerUtils.setUDPProbeResult(reqUrl, true);
}
}
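// Unless a UDP scrape URL has been established for this tracker, the scrape is performed over plain HTTP.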
scrapeCount++;

try {
    if (udpScrapeURL == null) {
        redirect_url = scrapeHTTP(reqUrl, message);
    }
} finally {
    // always clear the TLS torrent hash once the HTTP scrape attempt has completed
    TorrentUtils.setTLSTorrentHash(null);
}
Map mapFiles;
int iMinRequestInterval;
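// Decode the bencoded scrape reply and remember whether this tracker identifies itself as an Azureus tracker.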
scrape_reply = message.toByteArray();
Map map = BDecoder.decode(scrape_reply);
boolean this_is_az_tracker = map.get("aztracker") != null;
if (az_tracker != this_is_az_tracker)
{
az_tracker = this_is_az_tracker;
TRTrackerUtils.setAZTracker(tracker_url, az_tracker);
}
mapFiles = (Map)map.get("files");
if (Logger.isEnabled())
    Logger.log(new LogEvent(LOGID, "Response from scrape interface " + scrapeURL + ": "
            + (mapFiles != null ? String.valueOf(mapFiles.size()) : "null") + " returned"));
iMinRequestInterval = 0;
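// An optional top-level "flags" map may carry a "min_request_interval" (in seconds) that the tracker asks us to honour between scrapes.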
if (map != null)
{
Map mapFlags = (Map)map.get("flags");
if (mapFlags != null)
{
Long longScrapeValue = (Long)mapFlags.get("min_request_interval");
if (longScrapeValue != null)
iMinRequestInterval = longScrapeValue.intValue();
if (Logger.isEnabled())
    Logger.log(new LogEvent(LOGID, "Received min_request_interval of " + iMinRequestInterval));
}
}
// An empty (or missing) "files" map means the tracker returned no per-torrent data at all:
// report an error on every response and bail out.
if (mapFiles == null || mapFiles.size() == 0) {
    byte[] failure_reason_bytes = map != null ? (byte[]) map.get("failure reason") : null;

    if (failure_reason_bytes != null) {
        // the tracker supplied an explicit failure reason
        long nextScrapeTime = SystemTime.getCurrentTime()
                + (long) (iMinRequestInterval != 0 ? iMinRequestInterval * 1000 : 600000);   // default retry: 10 minutes

        for (int i = 0; i < responses.size(); i++) {
            TRTrackerScraperResponseImpl response = (TRTrackerScraperResponseImpl) responses.get(i);
            response.setNextScrapeStartTime(nextScrapeTime);
            response.setStatus(1, MessageText.getString("Scrape.status.error")
                    + new String(failure_reason_bytes, "UTF8"));
            scraper.scrapeReceived(response);
        }
    } else if (responses.size() > 1) {
        // we asked about several hashes and got nothing back: assume the tracker only
        // handles single-hash scrapes from now on
        bSingleHashScrapes = true;
        if (Logger.isEnabled())
            Logger.log(new LogEvent(LOGID, 1, scrapeURL + " doesn't properly support multi-hash scrapes"));

        for (int i = 0; i < responses.size(); i++) {
            TRTrackerScraperResponseImpl response = (TRTrackerScraperResponseImpl) responses.get(i);
            response.setStatus(1, MessageText.getString("Scrape.status.error")
                    + MessageText.getString("Scrape.status.error.invalid"));
            scraper.scrapeReceived(response);
        }
    } else {
        // single-hash scrape with no entry for that hash
        long nextScrapeTime = SystemTime.getCurrentTime()
                + (long) (iMinRequestInterval != 0 ? iMinRequestInterval * 1000 : 10800000);   // default retry: 3 hours

        TRTrackerScraperResponseImpl response = (TRTrackerScraperResponseImpl) responses.get(0);
        response.setNextScrapeStartTime(nextScrapeTime);
        response.setStatus(1, MessageText.getString("Scrape.status.error")
                + MessageText.getString("Scrape.status.error.nohash"));
        scraper.scrapeReceived(response);
    }

    return;   // numActiveScrapes is decremented in the enclosing finally block
}
if (!bSingleHashScrapes && responses.size() > 1 && mapFiles.size() == 1)
{
bSingleHashScrapes = true;
if (Logger.isEnabled())
Logger.log(new LogEvent(LOGID, 1, scrapeURL + " only returned " + mapFiles.size()
        + " hash scrape(s), but we asked for " + responses.size()));
}
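// Walk the responses we asked about and match each one against the per-torrent entries in the reply.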
for (int i = 0; i < responses.size(); i++)
{
TRTrackerScraperResponseImpl response = (TRTrackerScraperResponseImpl)responses.get(i);
Map scrapeMap = (Map)mapFiles.get(new String(response.getHash().getBytes(), "ISO-8859-1"));
if (scrapeMap == null)
{
if (responses.size() == 1 || mapFiles.size() != 1)
{
response.setNextScrapeStartTime(SystemTime.getCurrentTime() + 10800000L);   // retry in 3 hours
response.setStatus(1, MessageText.getString("Scrape.status.error")
        + MessageText.getString("Scrape.status.error.nohash"));
scraper.scrapeReceived(response);
} else
{
response.revertStatus();
if (response.getStatus() == 3)
{
response.setNextScrapeStartTime(SystemTime.getCurrentTime() + 600000L);   // retry in 10 minutes
response.setStatus(1, MessageText.getString("Scrape.status.error")
        + MessageText.getString("Scrape.status.error.invalid"));
} else
{
bSingleHashScrapes = true;
if (original_bSingleHashScrapes)
response.setNextScrapeStartTime(SystemTime.getCurrentTime() + 600000L);   // retry in 10 minutes
}
scraper.scrapeReceived(response);
}
} else
{
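// A scrape entry exists for this hash: read the seed / peer / completed counts.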
int seeds = ((Long)scrapeMap.get("complete")).intValue();
int peers = ((Long)scrapeMap.get("incomplete")).intValue();
Long comp = (Long)scrapeMap.get("downloaded");
int completed = comp != null ? comp.intValue() : -1;
if (seeds < 0 || peers < 0 || completed < -1)
{
if (Logger.isEnabled())
{
HashWrapper hash = response.getHash();
Logger.log(new LogEvent(TorrentUtils.getDownloadManager(hash), LOGID,
        "Invalid scrape response from '" + reqUrl + "': map = " + scrapeMap));
}
if (responses.size() > 1 && bSingleHashScrapes)
{
response.setStatus(1, MessageText.getString("Scrape.status.error")
        + MessageText.getString("Scrape.status.error.invalid"));
scraper.scrapeReceived(response);
} else
{
response.setNextScrapeStartTime(SystemTime.getCurrentTime() + 600000L);   // retry in 10 minutes
response.setStatus(1, MessageText.getString("Scrape.status.error")
        + MessageText.getString("Scrape.status.error.invalid") + " "
        + (seeds >= 0 ? "" : MessageText.getString("MyTorrentsView.seeds") + " == " + seeds + ". ")
        + (peers >= 0 ? "" : MessageText.getString("MyTorrentsView.peers") + " == " + peers + ". ")
        + (completed >= 0 ? "" : MessageText.getString("MyTorrentsView.completed") + " == " + completed + ". "));
scraper.scrapeReceived(response);
}
} else
{
int scrapeInterval = TRTrackerScraperResponseImpl.calcScrapeIntervalSecs(iMinRequestInterval, seeds);
long nextScrapeTime = SystemTime.getCurrentTime() + (long)(scrapeInterval * 1000);
response.setNextScrapeStartTime(nextScrapeTime);
response.setScrapeStartTime(scrapeStartTime);
response.setSeeds(seeds);
response.setPeers(peers);
response.setCompleted(completed);
response.setStatus(2, MessageText.getString("Scrape.status.ok"));
scraper.scrapeReceived(response);
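// If the scrape was redirected and we only asked about a single hash, rewrite the
// corresponding announce URL so that future requests follow the redirect directly.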
try
{
if (responses.size() == 1 && redirect_url != null)
{
String redirect_str = redirect_url.toString();
int s_pos = redirect_str.indexOf("/scrape");
if (s_pos != -1)
{
URL new_url = new URL(redirect_str.substring(0, s_pos) + "/announce" + redirect_str.substring(s_pos + 7));
if (scraper.redirectTrackerUrl(response.getHash(), tracker_url, new_url))
removeHash(response.getHash());
}
}
}
catch (Throwable e)
{
Debug.printStackTrace(e);
}
}
}
}
// (the "try {" matching the handlers below opens earlier in the method, above this fragment)
} catch (NoClassDefFoundError ignoreSSL) {
    // SSL classes are unavailable: report the problem against every response and retry later
    for (int i = 0; i < responses.size(); i++) {
        TRTrackerScraperResponseImpl response = (TRTrackerScraperResponseImpl) responses.get(i);
        response.setNextScrapeStartTime(SystemTime.getCurrentTime() + 600000L);   // retry in 10 minutes
        response.setStatus(1, MessageText.getString("Scrape.status.error") + ignoreSSL.getMessage());
        scraper.scrapeReceived(response);
    }
} catch (FileNotFoundException e) {
    for (int i = 0; i < responses.size(); i++) {
        TRTrackerScraperResponseImpl response = (TRTrackerScraperResponseImpl) responses.get(i);
        response.setNextScrapeStartTime(SystemTime.getCurrentTime() + 600000L);   // retry in 10 minutes
        response.setStatus(1, MessageText.getString("Scrape.status.error")
                + MessageText.getString("DownloadManager.error.filenotfound"));
        scraper.scrapeReceived(response);
    }
// The decompiler dropped the exception types of the next four handlers; each of them simply
// flags every outstanding response as errored:
//
//     } catch (<type lost in decompilation> e) {
//         setAllError(e);
//     }
//
// (four such clauses)
} catch (Exception e) {   // placeholder type - the original, more specific type was also lost
    String error_message = e.getMessage();
    if (error_message != null) {
        if (error_message.indexOf(" 500 ") >= 0 || error_message.indexOf(" 400 ") >= 0
                || error_message.indexOf(" 403 ") >= 0 || error_message.indexOf(" 404 ") >= 0
                || error_message.indexOf(" 501 ") >= 0) {
            // the tracker answered with a hard HTTP error - flag everything and give up for now
            setAllError(e);
            return;
        }
        if (error_message.indexOf("414") != -1 && !bSingleHashScrapes) {
            // HTTP 414 (request URI too long): fall back to single-hash scrapes next time
            bSingleHashScrapes = true;
            return;
        }
    }
    String msg = Debug.getNestedExceptionMessage(e);
    if (scrape_reply != null) {
        // include (at most) the first 150 bytes of the reply to help diagnose the failure
        String trace_data;
        if (scrape_reply.length <= 150)
            trace_data = new String(scrape_reply);
        else
            trace_data = new String(scrape_reply, 0, 150) + "...";
        msg = msg + " [" + trace_data + "]";
    }
    for (int i = 0; i < responses.size(); i++) {
        TRTrackerScraperResponseImpl response = (TRTrackerScraperResponseImpl) responses.get(i);
        if (Logger.isEnabled()) {
            HashWrapper hash = response.getHash();
            Logger.log(new LogEvent(TorrentUtils.getDownloadManager(hash), LOGID, 3,
                    "Error from scrape interface " + scrapeURL + " : " + msg + " (" + e.getClass() + ")"));
        }
        response.setNextScrapeStartTime(SystemTime.getCurrentTime() + 600000L);   // retry in 10 minutes
        response.setStatus(1, MessageText.getString("Scrape.status.error") + msg);
        scraper.scrapeReceived(response);
    }
} catch (Throwable t) {
    Debug.out("runScrapesSupport failed", t);
} finally {
    // exactly one decrement per scrape attempt, whatever the outcome
    numActiveScrapes--;
}
}
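/**
 * Flags every response currently tracked for this tracker as errored, using the supplied
 * exception's (localised) message, and schedules the next scrape attempt.
 */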
private void setAllError(Exception e)
{
    Object[] values;

    // snapshot the current responses while holding the hashes monitor
    try {
        hashes_mon.enter();

        values = hashes.values().toArray();
    } finally {
        hashes_mon.exit();
    }

    String msg = e.getLocalizedMessage();

    if (e instanceof BEncodingException) {
        if (msg.indexOf("html") != -1)
            msg = "could not decode response, appears to be a website instead of tracker scrape: "
                    + msg.replace('\n', ' ');
        else
            msg = "bencoded response malformed:" + msg;
    }

    for (int i = 0; i < values.length; i++) {
        TRTrackerScraperResponseImpl response = (TRTrackerScraperResponseImpl) values[i];

        if (Logger.isEnabled()) {
            HashWrapper hash = response.getHash();
            Logger.log(new LogEvent(TorrentUtils.getDownloadManager(hash), LOGID, 1,
                    "Error from scrape interface " + scrapeURL + " : " + msg));
        }

        response.setNextScrapeStartTime(SystemTime.getCurrentTime() + 600000L);   // retry in 10 minutes
        response.setStatus(1, StringInterner.intern(
                MessageText.getString("Scrape.status.error") + msg + " (IO)"));

        scraper.scrapeReceived(response);
    }
}
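/**
 * Issues the actual HTTP(S) scrape request for reqUrl, writing the raw reply into the
 * supplied buffer and returning the URL we were redirected to, if any (null otherwise).
 */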
protected URL scrapeHTTP(URL reqUrl, ByteArrayOutputStream message)
throws IOException
{
URL redirect_url;
Properties http_properties;
InputStream is;
redirect_url = null;
TRTrackerUtils.checkForBlacklistedURLs(reqUrl);
reqUrl = TRTrackerUtils.adjustURLForHosting(reqUrl);
reqUrl = AddressUtils.adjustURL(reqUrl);
http_properties = new Properties();
http_properties.put("URL", reqUrl);
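// Let the client-ID manager adjust the request properties (it may also rewrite the URL, which is re-read below).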
try
{
ClientIDManagerImpl.getSingleton().generateHTTPProperties(http_properties);
}
catch (ClientIDException e)
{
throw new IOException(e.getMessage());
}
reqUrl = (URL)http_properties.get("URL");
is = null;
byte data[];
int num_read;
HttpURLConnection con = null;
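// https scrape URLs get their own HostnameVerifier installed before the request is made.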
if (reqUrl.getProtocol().equalsIgnoreCase("https"))
{
HttpsURLConnection ssl_con = (HttpsURLConnection)reqUrl.openConnection();
ssl_con.setHostnameVerifier(new HostnameVerifier() {