
trackerstatus.java

This is P2P source code for a Java-based BitTorrent client.
Language: Java
Page 1 of 3
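Note: BitTorrent trackers conventionally expose scrape data at a URL derived from the announce URL by swapping the final path component "announce" for "scrape"; near the end of this listing the code applies the inverse rule to a scrape redirect. A minimal sketch of the convention (the helper name deriveScrapeURL is hypothetical, not part of this file):

	// Derive a tracker's scrape URL from its announce URL, per the standard
	// BitTorrent convention (the reverse of the rewrite done near the end of
	// this listing). Returns null when the convention doesn't apply.
	static java.net.URL deriveScrapeURL(java.net.URL announce)
			throws java.net.MalformedURLException {
		String s = announce.toString();
		int pos = s.lastIndexOf('/');
		// only valid when the final path component starts with "announce"
		if (pos != -1 && s.startsWith("announce", pos + 1)) {
			return new java.net.URL(s.substring(0, pos + 1) + "scrape"
					+ s.substring(pos + 1 + "announce".length()));
		}
		return null;
	}

For example, http://tracker.example/announce?key=1 becomes http://tracker.example/scrape?key=1.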
				URL	redirect_url = null;
				
				if (reqUrl.getProtocol().equalsIgnoreCase("udp")) {

					// TODO: support multi hash scrapes on UDP

					scrapeUDP(reqUrl, message, one_of_the_hashes, one_of_the_responses);

					bSingleHashScrapes = true;

				} else {
					
					redirect_url = scrapeHTTP(reqUrl, message);
				}

				Map map = BDecoder.decode(message.toByteArray());
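				// The raw response is a bencoded dictionary. As a rough, hypothetical
				// single-hash example of what arrives on the wire:
				//   d5:filesd20:<binary infohash>d8:completei10e10:downloadedi50e10:incompletei3eeee
				// BDecoder turns this into nested Maps keyed by those strings.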
								
				boolean	this_is_az_tracker = map.get( "aztracker" ) != null;
				
				if ( az_tracker != this_is_az_tracker ){
						
					az_tracker	= this_is_az_tracker;
					
					TRTrackerUtils.setAZTracker( tracker_url, az_tracker );
				}
				
				Map mapFiles = map == null ? null : (Map) map.get("files");

				if (Logger.isEnabled())
					Logger.log(new LogEvent(LOGID, "Response from scrape interface "
							+ scrapeURL + ": "
							+ ((mapFiles == null) ? "null" : "" + mapFiles.size())
							+ " returned"));

				int iMinRequestInterval = 0;
				if (map != null) {
					/* "The spec":
					 * files
					 *   infohash
					 *   complete
					 *   incomplete
					 *   downloaded
					 *   name
					 *  flags
					 *    min_request_interval
					 *  failure reason
					 */
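					/* A hypothetical decoded response, to make the shape concrete:
					 *   { "files" : { <20-byte infohash> : { "complete"   : 10,
					 *                                        "incomplete" : 3,
					 *                                        "downloaded" : 50 } },
					 *     "flags" : { "min_request_interval" : 1800 } }
					 */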
					Map mapFlags = (Map) map.get("flags");
					if (mapFlags != null) {
						Long longScrapeValue = (Long) mapFlags
								.get("min_request_interval");
						if (longScrapeValue != null)
							iMinRequestInterval = longScrapeValue.intValue();
						// Tracker owners want this log entry
						Logger.log(new LogEvent(LOGID,
								"Received min_request_interval of " + iMinRequestInterval));
					}
				}

				if (mapFiles == null || mapFiles.size() == 0) {

					// azureus extension here to handle "failure reason" returned for
					// scrapes

					byte[] failure_reason_bytes = map == null ? null : (byte[]) map
							.get("failure reason");

					if (failure_reason_bytes != null) {
						long nextScrapeTime = SystemTime.getCurrentTime()
								+ ((iMinRequestInterval == 0) ? FAULTY_SCRAPE_RETRY_INTERVAL
										: iMinRequestInterval * 1000);

						for (int i = 0; i < responses.size(); i++) {

							TRTrackerScraperResponseImpl response = (TRTrackerScraperResponseImpl) responses
									.get(i);

							response.setNextScrapeStartTime(nextScrapeTime);

							response.setStatus(TRTrackerScraperResponse.ST_ERROR,
									MessageText.getString(SS + "error")
											+ new String(failure_reason_bytes,
													Constants.DEFAULT_ENCODING));

							// notify listeners

							scraper.scrapeReceived(response);
						}

					} else {
						if (responses.size() > 1) {
							// multiple hashes were requested but none were returned, so
							// multi-hash scrapes aren't supported
							bSingleHashScrapes = true;
							if (Logger.isEnabled())
								Logger.log(new LogEvent(LOGID, LogEvent.LT_WARNING, scrapeURL
										+ " doesn't properly support " + "multi-hash scrapes"));

							for (int i = 0; i < responses.size(); i++) {
								TRTrackerScraperResponseImpl response = (TRTrackerScraperResponseImpl) responses
										.get(i);

								response.setStatus(TRTrackerScraperResponse.ST_ERROR,
										MessageText.getString(SS + "error")
												+ MessageText.getString(SSErr + "invalid"));
								// notify listeners
								scraper.scrapeReceived(response);
							}
						} else {
							long nextScrapeTime = SystemTime.getCurrentTime()
									+ ((iMinRequestInterval == 0) ? NOHASH_RETRY_INTERVAL
											: iMinRequestInterval * 1000);
							// 1 was requested, 0 returned. Therefore, hash not found.
							TRTrackerScraperResponseImpl response = (TRTrackerScraperResponseImpl) responses
									.get(0);
							response.setNextScrapeStartTime(nextScrapeTime);
							response.setStatus(TRTrackerScraperResponse.ST_ERROR,
									MessageText.getString(SS + "error")
											+ MessageText.getString(SSErr + "nohash"));
							// notify listeners
							scraper.scrapeReceived(response);
						}
					}

					return;
				}

				/*
				 * If we requested multiple hashes but only one was returned, revert
				 * to single-hash scrapes, but continue on to process the one hash that
				 * was returned (it may be a random one from the list)
				 */
				if (!bSingleHashScrapes && responses.size() > 1
						&& mapFiles.size() == 1) {
					bSingleHashScrapes = true;
					if (Logger.isEnabled())
						Logger.log(new LogEvent(LOGID, LogEvent.LT_WARNING, scrapeURL
								+ " only returned " + mapFiles.size()
								+ " hash scrape(s), but we asked for " + responses.size()));
				}

				for (int i = 0; i < responses.size(); i++) {
					TRTrackerScraperResponseImpl response = (TRTrackerScraperResponseImpl) responses
							.get(i);

					// LGLogger.log( "decoding response #" +i+ ": " +
					// ByteFormatter.nicePrint( response.getHash(), true ) );

					// retrieve the scrape data for the relevant infohash
					Map scrapeMap = (Map) mapFiles.get(new String(response.getHash().getBytes(),
							Constants.BYTE_ENCODING));

					if (scrapeMap == null) {
						// some trackers that return only 1 hash return a random one!
						if (responses.size() == 1 || mapFiles.size() != 1) {

							response.setNextScrapeStartTime(SystemTime.getCurrentTime()
									+ NOHASH_RETRY_INTERVAL);

							response.setStatus(TRTrackerScraperResponse.ST_ERROR,
									MessageText.getString(SS + "error")
											+ MessageText.getString(SSErr + "nohash"));
							// notify listeners
							scraper.scrapeReceived(response);
						} else {
							// This tracker doesn't support multiple hash requests.
							// revert status to what it was

							response.revertStatus();

							if (response.getStatus() == TRTrackerScraperResponse.ST_SCRAPING) {

								// System.out.println("Hash " +
								// ByteFormatter.nicePrint(response.getHash(), true) + "
								// mysteriously reverted to ST_SCRAPING!");

								// response.setStatus(TRTrackerScraperResponse.ST_ONLINE, "");

								response.setNextScrapeStartTime(SystemTime.getCurrentTime()
										+ FAULTY_SCRAPE_RETRY_INTERVAL);

								response.setStatus(TRTrackerScraperResponse.ST_ERROR,
										MessageText.getString(SS + "error")
												+ MessageText.getString(SSErr + "invalid"));

							} else {

								// force single-hash scrapes here

								bSingleHashScrapes = true;

								// only set the next retry time if this is the first
								// single-hash failure

								if (original_bSingleHashScrapes) {

									response.setNextScrapeStartTime(SystemTime.getCurrentTime()
											+ FAULTY_SCRAPE_RETRY_INTERVAL);
								}

							}
							// notify listeners
							scraper.scrapeReceived(response);

							// if this was the first scrape request in the list,
							// TrackerChecker will attempt to scrape again because we didn't
							// reset the nextScrapeStartTime. But next time bSingleHashScrapes
							// will be true and only 1 hash will be requested, so there
							// will be no infinite looping
						}
						// System.out.println("scrape: hash missing from reply");
					} else {
						// retrieve values
						int seeds = ((Long) scrapeMap.get("complete")).intValue();
						int peers = ((Long) scrapeMap.get("incomplete")).intValue();

						// make sure we don't use invalid replies
						if (seeds < 0 || peers < 0) {
							if (Logger.isEnabled()) {
								HashWrapper hash = response.getHash();
								Logger.log(new LogEvent(TorrentUtils.getDownloadManager(hash),
										LOGID, "Invalid scrape response from '" + reqUrl
												+ "': map = " + scrapeMap));
							}

							// We requested multiple hashes, but tracker didn't support
							// multiple hashes and returned 1 hash. However, that hash is
							// invalid because seeds or peers was < 0. So, exit. Scrape
							// manager will run scrapes for each individual hash.
							if (responses.size() > 1 && bSingleHashScrapes) {

								response.setStatus(TRTrackerScraperResponse.ST_ERROR,
										MessageText.getString(SS + "error")
												+ MessageText.getString(SSErr + "invalid"));

								scraper.scrapeReceived(response);

								continue;
							}

							response.setNextScrapeStartTime(SystemTime.getCurrentTime()
									+ FAULTY_SCRAPE_RETRY_INTERVAL);
							response.setStatus(TRTrackerScraperResponse.ST_ERROR,
									MessageText.getString(SS + "error")
											+ MessageText.getString(SSErr + "invalid")
											+ " "
											+ (seeds < 0 ? MessageText
													.getString("MyTorrentsView.seeds")
													+ " == " + seeds + ". " : "")
											+ (peers < 0 ? MessageText
													.getString("MyTorrentsView.peers")
													+ " == " + peers + ". " : ""));

							scraper.scrapeReceived(response);

							continue;
						}

						int scrapeInterval = TRTrackerScraperResponseImpl
								.calcScrapeIntervalSecs(iMinRequestInterval, seeds);
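						// e.g. (hypothetical numbers) with min_request_interval == 1800 the
						// helper returns an interval of at least 1800s; the seed count is
						// also passed in, presumably so the schedule can vary with swarm size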

						long nextScrapeTime = SystemTime.getCurrentTime()
								+ (scrapeInterval * 1000);
						response.setNextScrapeStartTime(nextScrapeTime);

						// create the response
						response.setScrapeStartTime(scrapeStartTime);
						response.setSeeds(seeds);
						response.setPeers(peers);
						response.setStatus(TRTrackerScraperResponse.ST_ONLINE,
								MessageText.getString(SS + "ok"));

						// notify listeners
						scraper.scrapeReceived(response);
						
						try{
							if ( responses.size() == 1 && redirect_url != null ){
								
									// we only deal with redirects for single URLs - if the tracker wants to
									// redirect one of a group it has to force single-hash scrapes anyway
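									// e.g. a (hypothetical) redirect to http://tracker.example/scrape?info_hash=...
									// is rewritten below to http://tracker.example/announce?info_hash=...
									// and the download is moved to that announce URL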
								
								String	redirect_str = redirect_url.toString();
								
								int s_pos =  redirect_str.indexOf( "/scrape" );
								
								if ( s_pos != -1 ){
									
									URL	new_url = new URL( redirect_str.substring(0,s_pos) +
													"/announce" + redirect_str.substring(s_pos+7));
									
									if ( scraper.redirectTrackerUrl( response.getHash(), tracker_url, new_url )){
										
										removeHash( response.getHash());
									}
								}
							}
						}catch( Throwable e ){
							
							Debug.printStackTrace(e);
						}
					}
				} // for responses

			} catch (NoClassDefFoundError ignoreSSL) { // javax/net/ssl/SSLSocket
				for (int i = 0; i < responses.size(); i++) {
					TRTrackerScraperResponseImpl response = (TRTrackerScraperResponseImpl) responses
							.get(i);
					response.setNextScrapeStartTime(SystemTime.getCurrentTime()
							+ FAULTY_SCRAPE_RETRY_INTERVAL);
					response.setStatus(TRTrackerScraperResponse.ST_ERROR, MessageText
							.getString(SS + "error")
							+ ignoreSSL.getMessage());
					// notify listeners
					scraper.scrapeReceived(response);
				}
			} catch (FileNotFoundException e) {
				for (int i = 0; i < responses.size(); i++) {
					TRTrackerScraperResponseImpl response = (TRTrackerScraperResponseImpl) responses
							.get(i);
					response.setNextScrapeStartTime(SystemTime.getCurrentTime()
							+ FAULTY_SCRAPE_RETRY_INTERVAL);
					response.setStatus(TRTrackerScraperResponse.ST_ERROR, MessageText
							.getString(SS + "error")
							+ MessageText.getString("DownloadManager.error.filenotfound"));
					// notify listeners
					scraper.scrapeReceived(response);
				}
			} catch (ConnectException e) {
				for (int i = 0; i < responses.size(); i++) {
					TRTrackerScraperResponseImpl response = (TRTrackerScraperResponseImpl) responses
							.get(i);
					response.setNextScrapeStartTime(SystemTime.getCurrentTime()
							+ FAULTY_SCRAPE_RETRY_INTERVAL);
					response.setStatus(TRTrackerScraperResponse.ST_ERROR, MessageText
							.getString(SS + "error")
							+ e.getLocalizedMessage());
					// notify listeners
					scraper.scrapeReceived(response);
				}
			} catch (Exception e) {

				// for Apache we can get error 414 (Request-URI Too Long); the simplest
				// solution is to fall back to single-hash scraping

				String error_message = e.getMessage();

				if (error_message != null && error_message.indexOf("414") != -1
						&& !bSingleHashScrapes) {
					bSingleHashScrapes = true;
					// Skip setting up the response. We want to scrape again
					return;
				}

				String msg = Debug.getNestedExceptionMessage(e);

				for (int i = 0; i < responses.size(); i++) {
					TRTrackerScraperResponseImpl response = (TRTrackerScraperResponseImpl) responses
							.get(i);

					if (Logger.isEnabled()) {
						HashWrapper hash = response.getHash();
						Logger.log(new LogEvent(TorrentUtils.getDownloadManager(hash), LOGID,
								LogEvent.LT_ERROR, "Error from scrape interface " + scrapeURL
										+ " : " + msg));
					}

					response.setNextScrapeStartTime(SystemTime.getCurrentTime()
							+ FAULTY_SCRAPE_RETRY_INTERVAL);
					response.setStatus(TRTrackerScraperResponse.ST_ERROR, MessageText
							.getString(SS + "error")
							+ msg);
					// notify listeners
					scraper.scrapeReceived(response);
				}
			}

		} catch (Throwable t) {
			Debug.out("runScrapesSupport failed", t);
		}
	}

  protected URL 
  scrapeHTTP(
  	URL 					reqUrl, 
	ByteArrayOutputStream 	message )
  
  	throws IOException
  {
	URL	redirect_url = null;
	
  	TRTrackerUtils.checkForBlacklistedURLs( reqUrl );
  	
    reqUrl = TRTrackerUtils.adjustURLForHosting( reqUrl );

    reqUrl = AddressUtils.adjustURL( reqUrl );
    
  	// System.out.println( "scraping " + reqUrl.toString());
  	
	Properties	http_properties = new Properties();
		
	http_properties.put( ClientIDGenerator.PR_URL, reqUrl );
		
	try{
		ClientIDManagerImpl.getSingleton().generateHTTPProperties( http_properties );
		
	}catch( ClientIDException e ){
		
		throw( new IOException( e.getMessage()));
	}
	
	reqUrl = (URL)http_properties.get( ClientIDGenerator.PR_URL );

  	InputStream is = null;
  	
  	try{
	  	HttpURLConnection con = null;

	  	if ( reqUrl.getProtocol().equalsIgnoreCase("https")){
	  		
	  			// see ConfigurationChecker for SSL client defaults
	  		
	  		HttpsURLConnection ssl_con = (HttpsURLConnection)reqUrl.openConnection();
	  		
	  			// allow for certs that contain IP addresses rather than dns names
	  		
	  		ssl_con.setHostnameVerifier(
	  				new HostnameVerifier() {
	  					public boolean verify(String host, SSLSession session) {
	  						return( true );
	  					}
