📄 TrackerStatus.java

📁 Java file downloader. Customizable.
💻 JAVA
📖 Page 1 of 3
// Decompiled by Jad v1.5.8e2. Copyright 2001 Pavel Kouznetsov.
// Jad home page: http://kpdus.tripod.com/jad.html
// Decompiler options: packimports(3) fieldsfirst ansi space 
// Source File Name:   TrackerStatus.java

package org.gudy.azureus2.core3.tracker.client.impl.bt;

import com.aelitis.azureus.core.networkmanager.impl.udp.UDPNetworkManager;
import com.aelitis.net.udp.uc.*;
import java.io.*;
import java.net.*;
import java.util.*;
import java.util.zip.GZIPInputStream;
import javax.net.ssl.*;
import org.gudy.azureus2.core3.config.COConfigurationManager;
import org.gudy.azureus2.core3.config.ParameterListener;
import org.gudy.azureus2.core3.internat.MessageText;
import org.gudy.azureus2.core3.logging.*;
import org.gudy.azureus2.core3.security.SESecurityManager;
import org.gudy.azureus2.core3.tracker.client.TRTrackerScraperClientResolver;
import org.gudy.azureus2.core3.tracker.client.TRTrackerScraperResponse;
import org.gudy.azureus2.core3.tracker.client.impl.TRTrackerScraperImpl;
import org.gudy.azureus2.core3.tracker.client.impl.TRTrackerScraperResponseImpl;
import org.gudy.azureus2.core3.tracker.protocol.udp.*;
import org.gudy.azureus2.core3.tracker.util.TRTrackerUtils;
import org.gudy.azureus2.core3.util.*;
import org.gudy.azureus2.plugins.clientid.ClientIDException;
import org.gudy.azureus2.pluginsimpl.local.clientid.ClientIDManagerImpl;

// Referenced classes of package org.gudy.azureus2.core3.tracker.client.impl.bt:
//			TRTrackerBTAnnouncerImpl, TRTrackerBTScraperResponseImpl, TrackerChecker
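
// TrackerStatus tracks the scrape state for a single tracker URL: it derives a
// scrape URL from the announce URL, caches per-torrent responses in 'hashes'
// (keyed by info-hash), batches up to GROUP_SCRAPES_LIMIT torrents per request,
// and can scrape over HTTP or over UDP (BEP 15).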

public class TrackerStatus
{

	private static final LogIDs LOGID;	// assigned in the class's static initializer, which Jad emits at the end of the decompiled file
	private static final String SS = "Scrape.status.";
	private static final String SSErr = "Scrape.status.error.";
	private static final int FAULTY_SCRAPE_RETRY_INTERVAL = 0x927c0;	// 600,000 ms = 10 minutes
	private static final int NOHASH_RETRY_INTERVAL = 0xa4cb80;	// 10,800,000 ms = 3 hours
	private static final int GROUP_SCRAPES_MS = 0xdbba0;	// 900,000 ms = 15 minutes
	private static final int GROUP_SCRAPES_LIMIT = 20;	// max torrents batched into one scrape request
	private static boolean udpScrapeEnabled = true;
	private byte autoUDPscrapeEvery;
	private int scrapeCount;
	private static List logged_invalid_urls = new ArrayList();
	private static ThreadPool thread_pool = new ThreadPool("TrackerStatus", 10, true);
	private final URL tracker_url;
	private boolean az_tracker;
	private String scrapeURL;
	private HashMap hashes;
	private TRTrackerScraperImpl scraper;
	private boolean bSingleHashScrapes;
	protected AEMonitor hashes_mon;
	private final TrackerChecker checker;
	private volatile int numActiveScrapes;

	public TrackerStatus(TrackerChecker _checker, TRTrackerScraperImpl _scraper, URL _tracker_url)
	{
		autoUDPscrapeEvery = 1;
		scrapeURL = null;
		bSingleHashScrapes = false;
		hashes_mon = new AEMonitor("TrackerStatus:hashes");
		numActiveScrapes = 0;
		checker = _checker;
		scraper = _scraper;
		tracker_url = _tracker_url;
		az_tracker = TRTrackerUtils.isAZTracker(tracker_url);
		bSingleHashScrapes = COConfigurationManager.getBooleanParameter("Tracker Client Scrape Single Only");
		String trackerUrl = tracker_url.toString();
		hashes = new HashMap();
		try
		{
			trackerUrl = trackerUrl.replaceAll(" ", "");
			int position = trackerUrl.lastIndexOf('/');
			if (position >= 0 && trackerUrl.length() >= position + 9 && trackerUrl.substring(position + 1, position + 9).equals("announce"))
				scrapeURL = (new StringBuilder()).append(trackerUrl.substring(0, position + 1)).append("scrape").append(trackerUrl.substring(position + 9)).toString();
			else if (trackerUrl.toLowerCase().startsWith("udp:"))
				scrapeURL = trackerUrl;
			else if (position >= 0 && trackerUrl.lastIndexOf('.') < position)
				scrapeURL = (new StringBuilder()).append(trackerUrl).append(trackerUrl.endsWith("/") ? "" : "/").append("scrape").toString();
			else if (!logged_invalid_urls.contains(trackerUrl))
				logged_invalid_urls.add(trackerUrl);
		}
		catch (Throwable e)
		{
			Debug.printStackTrace(e);
		}
	}
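
	// Illustrative examples of the announce -> scrape mapping performed above
	// (hypothetical tracker host; the standard BitTorrent scrape convention):
	//   http://tracker.example.com/announce          -> http://tracker.example.com/scrape
	//   http://tracker.example.com/announce?key=abc  -> http://tracker.example.com/scrape?key=abc
	//   udp://tracker.example.com:6969/announce      -> kept as-is and scraped over UDP
	//   http://tracker.example.com/page.php          -> unsupported; logged once as invalid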

	protected boolean isTrackerScrapeUrlValid()
	{
		return scrapeURL != null;
	}
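
	// Hypothetical usage sketch (caller-side names assumed, not part of this file):
	//
	//   TrackerStatus ts = new TrackerStatus(checker, scraper,
	//           new URL("http://tracker.example.com/announce"));
	//   if (ts.isTrackerScrapeUrlValid())
	//       ts.updateSingleHash(hashWrapper, true);	// force an immediate scrape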

	protected TRTrackerScraperResponseImpl getHashData(HashWrapper hash)
	{
		try
		{
			hashes_mon.enter();
			return (TRTrackerScraperResponseImpl)hashes.get(hash);
		}
		finally
		{
			hashes_mon.exit();
		}
	}

	protected void updateSingleHash(HashWrapper hash, boolean force)
	{
		updateSingleHash(hash, force, true);
	}

	protected void updateSingleHash(HashWrapper hash, boolean force, boolean async)
	{
		try
		{
			if (scrapeURL == null)
			{
				if (Logger.isEnabled())
					Logger.log(new LogEvent(TorrentUtils.getDownloadManager(hash), LOGID, "TrackerStatus: scrape cancelled.. url null"));
				return;
			}
			ArrayList responsesToUpdate = new ArrayList();
			TRTrackerScraperResponseImpl response;
			try
			{
				hashes_mon.enter();
				response = (TRTrackerScraperResponseImpl)hashes.get(hash);
			}
			finally
			{
				hashes_mon.exit();
			}
			if (response == null)
				response = addHash(hash);
			long lMainNextScrapeStartTime = response.getNextScrapeStartTime();
			if (!force && lMainNextScrapeStartTime > SystemTime.getCurrentTime())
			{
				if (Logger.isEnabled())
					Logger.log(new LogEvent(TorrentUtils.getDownloadManager(hash), LOGID, (new StringBuilder()).append("TrackerStatus: scrape cancelled.. not forced and still ").append(lMainNextScrapeStartTime - SystemTime.getCurrentTime()).append("ms").toString()));
				return;
			}
			response.setStatus(3, MessageText.getString("Scrape.status.scraping.queued"));
			if (Logger.isEnabled())
				Logger.log(new LogEvent(TorrentUtils.getDownloadManager(hash), LOGID, "TrackerStatus: setting to scraping"));
			responsesToUpdate.add(response);
			if (!bSingleHashScrapes)
			{
				// Group scrape: also queue any other tracked hashes whose next scrape
				// falls within GROUP_SCRAPES_MS of this one, up to GROUP_SCRAPES_LIMIT
				// per request.
				try
				{
					hashes_mon.enter();
					Iterator iterHashes = hashes.values().iterator();
					while (iterHashes.hasNext() && responsesToUpdate.size() < GROUP_SCRAPES_LIMIT)
					{
						TRTrackerScraperResponseImpl r = (TRTrackerScraperResponseImpl)iterHashes.next();
						if (!r.getHash().equals(hash))
						{
							long lTimeDiff = Math.abs(lMainNextScrapeStartTime - r.getNextScrapeStartTime());
							if (lTimeDiff <= GROUP_SCRAPES_MS && r.getStatus() != 3)
							{
								r.setStatus(3, MessageText.getString("Scrape.status.scraping.queued"));
								if (Logger.isEnabled())
									Logger.log(new LogEvent(TorrentUtils.getDownloadManager(r.getHash()), LOGID, "TrackerStatus: setting to scraping via group scrape"));
								responsesToUpdate.add(r);
							}
						}
					}
				}
				finally
				{
					hashes_mon.exit();
				}
			}
			runScrapes(responsesToUpdate, force, async);
		}
		catch (Throwable t)
		{
			Debug.out("updateSingleHash() exception", t);
		}
	}

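	// runScrapes dispatches the batched responses either asynchronously on the shared
	// "TrackerStatus" thread pool (up to 10 concurrent workers) or inline on the
	// caller's thread.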
	protected void runScrapes(final ArrayList responses, final boolean force, boolean async)
	{
		numActiveScrapes++;
		if (async)
		{
			thread_pool.run(new AERunnable() {

				public void runSupport()
				{
					runScrapesSupport(responses, force);
				}
			});
			if (Logger.isEnabled())
				Logger.log(new LogEvent(LOGID, (new StringBuilder()).append("TrackerStatus: queuing '").append(scrapeURL).append("', for ").append(responses.size()).append(" of ").append(hashes.size()).append(" hashes").append(", single_hash_scrapes: ").append(bSingleHashScrapes ? "Y" : "N").append(", queue size=").append(thread_pool.getQueueSize()).toString()));
		} else
		{
			runScrapesSupport(responses, force);
		}
	}

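	// runScrapesSupport builds and issues the actual scrape request: one URL-encoded
	// info_hash parameter is appended per eligible torrent, Azureus-specific flags are
	// added for az trackers, and then a transport (HTTP or UDP) is chosen.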
	protected void runScrapesSupport(ArrayList responses, boolean force)
	{
		if (Logger.isEnabled())
			Logger.log(new LogEvent(LOGID, (new StringBuilder()).append("TrackerStatus: scraping '").append(scrapeURL).append("', for ").append(responses.size()).append(" of ").append(hashes.size()).append(" hashes").append(", single_hash_scrapes: ").append(bSingleHashScrapes ? "Y" : "N").toString()));
		boolean original_bSingleHashScrapes = bSingleHashScrapes;
		boolean disable_all_scrapes = !COConfigurationManager.getBooleanParameter("Tracker Client Scrape Enable");
		boolean disable_stopped_scrapes = !COConfigurationManager.getBooleanParameter("Tracker Client Scrape Stopped Enable");
		byte[] scrape_reply = null;
		HashWrapper one_of_the_hashes = null;
		TRTrackerScraperResponseImpl one_of_the_responses = null;
		char first_separator = scrapeURL.indexOf('?') != -1 ? '&' : '?';
		String info_hash = "";
		String flags = "";
		List hashesForUDP = new ArrayList();
		for (int i = 0; i < responses.size(); i++)
		{
			TRTrackerScraperResponseImpl response = (TRTrackerScraperResponseImpl)responses.get(i);
			HashWrapper hash = response.getHash();
			if (Logger.isEnabled())
				Logger.log(new LogEvent(TorrentUtils.getDownloadManager(hash), LOGID, (new StringBuilder()).append("TrackerStatus: scraping, single_hash_scrapes = ").append(bSingleHashScrapes).toString()));
			if (!scraper.isNetworkEnabled(hash, tracker_url))
			{
				response.setNextScrapeStartTime(SystemTime.getCurrentTime() + FAULTY_SCRAPE_RETRY_INTERVAL);
				response.setStatus(1, MessageText.getString("Scrape.status.networkdisabled"));
				scraper.scrapeReceived(response);
				continue;
			}
			if (!force && (disable_all_scrapes || disable_stopped_scrapes && !scraper.isTorrentRunning(hash)))
			{
				response.setNextScrapeStartTime(SystemTime.getCurrentTime() + FAULTY_SCRAPE_RETRY_INTERVAL);
				response.setStatus(1, MessageText.getString("Scrape.status.disabled"));
				scraper.scrapeReceived(response);
				continue;
			}
			response.setStatus(3, MessageText.getString("Scrape.status.scraping"));
			scraper.scrapeReceived(response);
			info_hash = (new StringBuilder()).append(info_hash).append(one_of_the_hashes == null ? first_separator : '&').append("info_hash=").toString();
			info_hash = (new StringBuilder()).append(info_hash).append(URLEncoder.encode(new String(hash.getBytes(), "ISO-8859-1"), "ISO-8859-1").replaceAll("\\+", "%20")).toString();
			Object extensions[] = scraper.getExtensions(hash);
			if (extensions != null)
			{
				if (extensions[0] != null)
					info_hash = (new StringBuilder()).append(info_hash).append((String)extensions[0]).toString();
				flags = (new StringBuilder()).append(flags).append((Character)extensions[1]).toString();
			} else
			{
				flags = (new StringBuilder()).append(flags).append(TRTrackerScraperClientResolver.FL_NONE).toString();
			}
			one_of_the_responses = response;
			one_of_the_hashes = hash;
			if (hashesForUDP.size() < 70)	// BEP 15 UDP scrape packets hold at most ~74 hashes; stay below that
				hashesForUDP.add(hash);
		}
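
		// At this point info_hash holds the full query fragment that is appended to
		// scrapeURL below, e.g. (hypothetical hash bytes):
		//   "?info_hash=%124Vx...&info_hash=%AB%CD%EF..."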

		if (one_of_the_hashes == null)
		{
			numActiveScrapes--;
			return;
		}
		String request = (new StringBuilder()).append(scrapeURL).append(info_hash).toString();
		if (az_tracker)
		{
			String port_details = TRTrackerUtils.getPortsForURL();
			request = (new StringBuilder()).append(request).append(port_details).toString();
			request = (new StringBuilder()).append(request).append("&azsf=").append(flags).append("&azver=").append(3).toString();
		}
		URL reqUrl = new URL(request);
		if (Logger.isEnabled())
			Logger.log(new LogEvent(LOGID, (new StringBuilder()).append("Accessing scrape interface using url : ").append(reqUrl).toString()));
		ByteArrayOutputStream message = new ByteArrayOutputStream();
		long scrapeStartTime = SystemTime.getCurrentTime();
		URL redirect_url = null;
		String protocol = reqUrl.getProtocol();
		URL udpScrapeURL = null;
		boolean auto_probe = false;
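		// Transport selection: udp:// URLs are scraped over UDP directly; for plain
		// http:// trackers a udp:// equivalent is probed every autoUDPscrapeEvery-th
		// scrape, falling back to HTTP (and doubling the probe interval) on failure.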
		if (protocol.equalsIgnoreCase("udp") && udpScrapeEnabled)
			udpScrapeURL = reqUrl;
		else if (protocol.equalsIgnoreCase("http") && !az_tracker && scrapeCount % autoUDPscrapeEvery == 0 && udpScrapeEnabled)
		{
			udpScrapeURL = new URL(reqUrl.toString().replaceFirst("^http", "udp"));
			auto_probe = true;
		}
		TorrentUtils.setTLSTorrentHash(one_of_the_hashes);
		if (udpScrapeURL != null)
		{
			boolean success = scrapeUDP(reqUrl, message, hashesForUDP, !auto_probe);
			if ((!success || message.size() == 0) && !protocol.equalsIgnoreCase("udp"))
			{
				udpScrapeURL = null;
				message.reset();
				if (autoUDPscrapeEvery < 16)
					autoUDPscrapeEvery <<= 1;
