📄 rdresumehandler.java
字号:
files[i].flushCache();
}
return;
}
boolean was_complete = isTorrentResumeDataComplete( disk_manager.getDownloadManager());
DiskManagerPiece[] pieces = disk_manager.getPieces();
//build the piece byte[]
byte[] resume_pieces = new byte[pieces.length];
if ( !force_recheck ){
for (int i = 0; i < resume_pieces.length; i++) {
if ( pieces[i].isDone()){
resume_pieces[i] = PIECE_DONE;
}else{
resume_pieces[i] = PIECE_NOT_DONE;
}
}
}
Map resume_data = new HashMap();
resume_data.put( "resume data", resume_pieces );
Map partialPieces = new HashMap();
if ( savePartialPieces && !force_recheck ){
for (int i = 0; i < pieces.length; i++) {
DiskManagerPiece piece = pieces[i];
// save the partial pieces for any pieces that have not yet been completed
// and are in-progress (i.e. have at least one block downloaded)
boolean[] downloaded = piece.getWritten();
if (( !piece.isDone()) && piece.getNbWritten() > 0 && downloaded != null ){
List blocks = new ArrayList();
for (int j = 0; j < downloaded.length; j++) {
if (downloaded[j]){
blocks.add(new Long(j));
}
}
partialPieces.put("" + i, blocks);
}
}
resume_data.put("blocks", partialPieces);
}
// savePartialPieces has overloaded meanings. It also implies that the download
// is stopping, as opposed to this being an interim resume data save, and therefore
// that the resume data should be set as "valid". Being valid has the meaning that
// blocks marked as not-done will *not* be checked when the torrent is restarted
// to see if they are actually complete.
// TODO: fix this up!!!!
long lValid = 0;
if (!force_recheck && savePartialPieces && !bStoppedMidCheck){
lValid = 1;
}
resume_data.put("valid", new Long(lValid));
for (int i=0;i<files.length;i++){
files[i].flushCache();
}
// OK, we've got valid resume data and flushed the cache
boolean is_complete = isTorrentResumeDataComplete( disk_manager.getDownloadManager(), resume_data );
if ( was_complete && is_complete ){
// no change, no point in writing
}else{
saveResumeData( resume_data );
}
}
protected Map
getResumeData()
{
	// Instance convenience: resolve this handler's download manager and
	// delegate to the static lookup.
	DownloadManager dm = disk_manager.getDownloadManager();

	return( getResumeData( dm ));
}
/**
 * Retrieves the persisted resume-data map for the given download.
 *
 * Current format stores everything under a single "data" key. If that key is
 * absent, this falls back to legacy per-directory keys (the resume key was
 * historically the save path), probing up to four variants in order:
 * the raw path, its byte-encoding-mangled form, the canonicalised path, and
 * the mangled canonicalised path. A hit on any legacy key is immediately
 * re-saved in the current format so subsequent lookups take the fast path.
 *
 * @param download_manager the download whose state holds the resume data
 * @return the resume data map, or null if none is stored
 */
protected static Map
getResumeData(
DownloadManager download_manager)
{
DownloadManagerState download_manager_state = download_manager.getDownloadState();
Map resumeMap = download_manager_state.getResumeData();
if ( resumeMap != null ){
// time to remove this directory based madness - just use a "data" key
Map resume_data = (Map)resumeMap.get( "data" );
if ( resume_data != null ){
// current-format entry found - done
return( resume_data );
}
// backward compatability here over path management changes :(
// legacy resume key: for a simple torrent the containing directory,
// otherwise the save location itself
String resume_key =
download_manager.getTorrent().isSimpleTorrent()?
download_manager.getAbsoluteSaveLocation().getParent():
download_manager.getAbsoluteSaveLocation().toString();
String[] resume_keys = new String[4];
// see bug 869749 for explanation of this mangling
// unfortunately, if the torrent hasn't been saved and restored then the
// mangling with not yet have taken place. So we have to also try the
// original key (see 878015)
// also I've introduced canonicalisation into the resume key (2.1.0.5), so until any migration
// issues have been resolved we need to support both original + non-canonicalised forms
resume_keys[0] = resume_key;
try{
// variant 1: key round-tripped through the charset mangling
resume_keys[1]= new String( resume_key.getBytes(Constants.DEFAULT_ENCODING),Constants.BYTE_ENCODING);
// System.out.println( "resume: path = " + ByteFormatter.nicePrint(path )+ ", mangled_path = " + ByteFormatter.nicePrint(mangled_path));
}catch( Throwable e ){
// encoding failure: leave this variant null and keep probing the others
Debug.printStackTrace( e );
}
String canonical_resume_key = resume_key;
try{
// variant 2: canonicalised path (may hit the filesystem and fail)
canonical_resume_key = new File( resume_key).getCanonicalFile().toString();
}catch( Throwable e ){
Debug.printStackTrace( e );
}
resume_keys[2] = canonical_resume_key;
try{
// variant 3: canonicalised path with the same charset mangling applied
resume_keys[3]= new String( resume_keys[2].getBytes(Constants.DEFAULT_ENCODING),Constants.BYTE_ENCODING);
// System.out.println( "resume: path = " + ByteFormatter.nicePrint(path )+ ", mangled_path = " + ByteFormatter.nicePrint(mangled_path));
}catch( Throwable e ){
Debug.printStackTrace( e );
}
Map resumeDirectory = null;
// probe the legacy keys in declaration order; first hit wins
for (int i=0;i<resume_keys.length;i++){
String rk = resume_keys[i];
if ( rk != null ){
resumeDirectory = (Map)resumeMap.get(rk);
if ( resumeDirectory != null ){
break;
}
}
}
// if we've migrated, move it into the right place
// (re-save under the current "data" key so future lookups are direct)
if ( resumeDirectory != null ){
saveResumeData( download_manager_state, resumeDirectory );
}
return( resumeDirectory );
}else{
return( null );
}
}
protected void
saveResumeData(
	Map resume_data )
{
	// Instance convenience: persist against this download's state via the
	// static helper.
	DownloadManagerState state = disk_manager.getDownloadManager().getDownloadState();

	saveResumeData( state, resume_data );
}
protected static void
saveResumeData(
	DownloadManagerState	download_manager_state,
	Map						resume_data )
{
		// current format: everything lives under a single "data" entry

	Map wrapper = new HashMap();

	wrapper.put( "data", resume_data );

		// for a short while (2305 B33 current) we'll save the resume data in any existing locations as well so that
		// people can regress AZ versions after updating and their resume data will still work....

	Map previous = download_manager_state.getResumeData();

	if ( previous != null ){

		for ( Iterator it = previous.keySet().iterator(); it.hasNext(); ){

				// alias the same new map under every pre-existing (legacy) key

			wrapper.put( it.next(), resume_data );
		}
	}

	download_manager_state.setResumeData( wrapper );

	download_manager_state.save();
}
public static void
setTorrentResumeDataComplete(
	DownloadManagerState	download_manager_state )
{
		// Build resume data describing a fully-downloaded torrent: every piece
		// done, no partial blocks, and flagged valid so nothing is rechecked
		// on restart.

	TOTorrent torrent = download_manager_state.getTorrent();

	int piece_count = torrent.getNumberOfPieces();

	byte[] pieces = new byte[ piece_count ];

	int i = 0;

	while( i < pieces.length ){

		pieces[i++] = PIECE_DONE;
	}

	Map resume_data = new HashMap();

	resume_data.put( "resume data", pieces );
	resume_data.put( "blocks", new HashMap());
	resume_data.put( "valid", new Long(1));

	saveResumeData( download_manager_state, resume_data );
}
/**
 * Clears (or marks for recheck) the resume state of every piece spanned by
 * the given file, then re-saves the resume data flagged as valid.
 *
 * @param download_manager       owner of the resume data
 * @param file                   file whose pieces are affected
 * @param recheck                true: mark pieces PIECE_RECHECK_REQUIRED
 *                               (verified on restart); false: mark PIECE_NOT_DONE
 * @param ignore_first_and_last  true: leave the file's boundary pieces alone
 *                               (they may be shared with adjacent files)
 * @return number of pieces that were previously done and got cleared
 */
protected static int
clearResumeDataSupport(
DownloadManager download_manager,
DiskManagerFileInfo file,
boolean recheck,
boolean ignore_first_and_last )
{
DownloadManagerState download_manager_state = download_manager.getDownloadState();
Map resume_data = getResumeData( download_manager );
if ( resume_data == null ){
// nothing stored - nothing to clear
return(0);
}
int pieces_cleared = 0;
// TODO: we could be a bit smarter with the first and last pieces regarding
// partial blocks where the piece spans the file bounaries.
// clear any affected pieces
byte[] resume_pieces = (byte[])resume_data.get("resume data");
int first_piece = file.getFirstPieceNumber();
int last_piece = file.getLastPieceNumber();
if ( ignore_first_and_last ){
// shrink the range; if the file fits in <=2 pieces this can make it empty
first_piece++;
last_piece--;
}
if ( resume_pieces != null ){
for (int i=first_piece;i<=last_piece;i++){
if ( i >= resume_pieces.length ){
// stored array may be shorter than expected - stop rather than overrun
break;
}
if ( resume_pieces[i] == PIECE_DONE ){
pieces_cleared++;
}
resume_pieces[i] = recheck?PIECE_RECHECK_REQUIRED:PIECE_NOT_DONE;
}
}
// clear any affected partial pieces
// (keys are piece numbers as decimal strings; remove via the iterator
// to stay safe while mutating the map)
Map partial_pieces = (Map)resume_data.get("blocks");
if ( partial_pieces != null ){
Iterator iter = partial_pieces.keySet().iterator();
while (iter.hasNext()) {
int piece_number = Integer.parseInt((String)iter.next());
if ( piece_number >= first_piece && piece_number <= last_piece ){
iter.remove();
}
}
}
// either way we're valid as
// 1) clear -> pieces are set as not done
// 2) recheck -> pieces are set as "recheck" and will be checked on restart
resume_data.put( "valid", new Long(1));
saveResumeData( download_manager_state, resume_data );
return( pieces_cleared );
}
public static int
storageTypeChanged(
	DownloadManager			download_manager,
	DiskManagerFileInfo		file )
{
	// A storage-type switch invalidates the file's piece state. The boundary
	// pieces are skipped (ignore_first_and_last=true) and no recheck is
	// requested.
	int cleared = clearResumeDataSupport( download_manager, file, false, true );

	return( cleared );
}
public static void
clearResumeData(
	DownloadManager		download_manager )
{
	// Overwrite any stored resume data with a minimal map flagged invalid:
	// pieces will be verified when the download restarts.
	Map invalidated = new HashMap();

	invalidated.put( "valid", new Long(0));

	saveResumeData( download_manager.getDownloadState(), invalidated );
}
public void
clearResumeData()
{
	// Instance convenience: clear resume data for this handler's download.
	DownloadManager dm = disk_manager.getDownloadManager();

	clearResumeData( dm );
}
public static void
clearResumeData(
	DownloadManager			download_manager,
	DiskManagerFileInfo		file )
{
	// Mark every piece of the file (boundary pieces included) as not-done,
	// without requesting a recheck.
	clearResumeDataSupport( download_manager, file, false, false );
}
public static void
recheckFile(
	DownloadManager			download_manager,
	DiskManagerFileInfo		file )
{
	// Flag every piece of the file (boundary pieces included) for
	// re-verification on restart (recheck=true).
	clearResumeDataSupport( download_manager, file, true, false );
}
/**
 * Writes resume data describing an "almost complete" download: all pieces
 * done except up to three randomly chosen ones, with the data flagged
 * invalid so the not-done pieces are rechecked on restart.
 *
 * @param download_manager_state state to persist the resume data into
 */
public static void
setTorrentResumeDataNearlyComplete(
	DownloadManagerState	download_manager_state )
{
		// backwards compatability, resume data key is the dir

	TOTorrent torrent = download_manager_state.getTorrent();

	long piece_count = torrent.getNumberOfPieces();

	byte[] resume_pieces = new byte[(int)piece_count];

	for (int i = 0; i < resume_pieces.length; i++) {

		resume_pieces[i] = PIECE_DONE;
	}

		// randomly clear some pieces (duplicate picks just mean fewer distinct
		// cleared pieces). Guard against a zero-piece torrent: previously
		// (int)(Math.random()*0) == 0 unconditionally indexed resume_pieces[0]
		// on a zero-length array -> ArrayIndexOutOfBoundsException.

	if ( piece_count > 0 ){

		for (int i=0;i<3;i++){

			int piece_num = (int)(Math.random()*piece_count);

			resume_pieces[piece_num]= PIECE_NOT_DONE;
		}
	}

	Map resumeMap = new HashMap();

	resumeMap.put( "resume data", resume_pieces);
	resumeMap.put( "blocks", new HashMap());
	resumeMap.put( "valid", new Long(0)); // recheck the not-done pieces

	saveResumeData( download_manager_state, resumeMap );
}
public static boolean
isTorrentResumeDataComplete(
	DownloadManager		download_manager )
{
	// backwards compatability, resume data key is the dir
	// Load whatever is stored (possibly null) and test it for completeness.
	Map stored = getResumeData( download_manager );

	return( isTorrentResumeDataComplete( download_manager, stored ));
}
/**
 * Tests whether the given resume data describes a fully-downloaded torrent:
 * it must be flagged valid, have no partial-piece blocks, and mark every
 * piece of the torrent as done.
 *
 * @param download_manager supplies the torrent's piece count
 * @param resume_data      resume map to inspect; may be null (-> false)
 * @return true only if the resume data proves completeness; false on any
 *         doubt or error (errors are logged)
 */
public static boolean
isTorrentResumeDataComplete(
	DownloadManager		download_manager,
	Map					resume_data )
{
	try{
		int piece_count = download_manager.getDownloadState().getTorrent().getNumberOfPieces();

		if ( resume_data != null ){

			byte[]	pieces	= (byte[])resume_data.get("resume data");
			Map		blocks	= (Map)resume_data.get("blocks");

				// treat a missing "valid" entry as not-valid instead of letting the
				// unboxing NPE reach the catch block and log a spurious stack trace

			Long	valid_l	= (Long)resume_data.get("valid");

			boolean	valid	= valid_l != null && valid_l.intValue() == 1;

				// any partial pieces -> not complete

			if ( blocks == null || blocks.size() > 0 ){

				return( false );
			}

			if ( valid && pieces != null && pieces.length == piece_count ){

				for (int i=0;i<pieces.length;i++){

					if ( pieces[i] != PIECE_DONE ){

							// missing piece or recheck outstanding

						return( false );
					}
				}

				return( true );
			}
		}
	}catch( Throwable e ){

		Debug.printStackTrace( e );
	}

	return( false );
}
}
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -