
📄 main.c

📁 Source code of a Linux network download utility
💻 C
📖 Page 1 of 2
/*      Get and Resume Elite EDition source code
        Get and Resume Elite EDition (GREED)
        Copyright (C) 1999  Anoakie Turner

        This program is free software; you can redistribute it and/or modify
        it under the terms of the GNU General Public License as published by
        the Free Software Foundation; either version 2 of the License, or
        (at your option) any later version.

        This program is distributed in the hope that it will be useful,
        but WITHOUT ANY WARRANTY; without even the implied warranty of
        MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
        GNU General Public License for more details.

        You should have received a copy of the GNU General Public License
        along with this program; if not, write to the Free Software
        Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.

        For more information on the GPL, please go to:
        http://www.gnu.org/copyleft/gpl.html

        Contact:  Anoakie Turner
                  Anoakie.Turner@asu.edu

                  13240 N. 94th Pl.
                  Scottsdale, AZ 85260
*/

/*
**  To compile, use gcc -O2 -Wall -o greed greed.c
**
**
**  To compile under any non linux OS, use:
**  gcc -O2 -Wall -lsocket -lxnet -o greed greed.c
*/

#include "main.h"

int CHECKIN;
int STDIN;
int TESTFILES;
int FORK;
int BACKGROUND;
int IMPORT_GR;
int RECURSE;
int RESUMES_LEFT;
int RESUME;
int NEWRC;
char* GR_FILE;

typedef struct NODE
{
	struct URLstruct URL;
	struct NODE *next;
} NODE;

NODE *list;
NODE *curr;

void Command (int argc, char *argv[]);
void Stdin ();
void File ();
void GRFile ();
void MailNotifyFinish();
void MailNotify(URLp URL, int valid);
void StartDownload ();


int main (int argc, char *argv[])
/**************************************
**	int main (int, char*)
**
** main() function.
*************************************/
{	int i = 1;

	list = malloc(sizeof(NODE));
	curr = list;
	curr->next = NULL;

	signal(SIGPIPE, SIG_IGN);

	/* Sets default global variables */
	RESUME = 1;
	CHECKIN = 1;
	STDIN = 0;
	FTP_WAIT_TIME = 0;
	OUTPUT_LEVEL = 5;
	STDOUT = 0;
	ROLLBACK = 4096;
	TIMEOUT_SEC = 300;
	MINSPEED = 0;
	REFERRER = 1;
	TESTFILES = 0;
	FORK = 0;
	BACKGROUND = 0;
	IMPORT_GR = 0;
	WAIT_RETRY = 10;
	DEBUG = 0;
	RECURSE = 0;
	CALL_ME = 0;
	RESUMES_LEFT = 0;
	NEWRC = 0;

	/* Parses the arguments in argv for switches */
	for (i = 1; i < argc; i ++)
		Switches(i, argv);

	/* Reads in configuration file and resets some globals */
	Proxy(NEWRC);

	/* Prints out Version info */
	if (OUTPUT_LEVEL > 0)
		printf("%s  [Get and Resume Elite EDition] - By "
			"Anoakie Turner\r\n\n", CURRENT_VERSION);

	/* If the background switch was switched, it forks into the background
		and exits the parent */
	if (BACKGROUND)
	{	OUTPUT_LEVEL = 0;
		TESTFILES = 0;
		if (fork() != 0)	/* If I am the parent */
		{	printf("Forking into background mode!\n");
			exit(0);
		}
	}

	/* Parses the URLS in argv */
	if (argc > 1)
		Command (argc, argv);

	/* Parses the URLS read from greed.in */
	if (CHECKIN)
		File ();

	/* Parses the URLS read from a standard GetRight file */
	if (IMPORT_GR)
		GRFile ();

	/* Parses the URLS read in from the standard input */
	if (STDIN)
		Stdin ();

	StartDownload ();

	/* If everything goes well, then exit with EXIT_SUCCESS */
	return(EXIT_SUCCESS);
}


void InsertURL (char *U, int recurse)
/************
       ****  NEW  ****
       ************/
{	NODE *ptr = list;

	while (ptr->next != NULL)
		ptr = ptr->next;

	ptr->next = malloc(sizeof(NODE));
	ptr = ptr->next;
	ptr->URL.name = U;
	ptr->URL.recurse = recurse;
	ptr->next = NULL;
}


void PrintList ()
/************
       ****  NEW  ****
       ************/
{	NODE *ptr = list->next;

	while (ptr != NULL)
	{	printf("%s\n", ptr->URL.name);
		ptr = ptr->next;
	}
	printf("\n\n");
}


void Command (int argc, char *argv[])
/**************************************
**	void Command (URLp, int, char*)
**
** Pre:  argc == the number of elements in argv.
** Post: Attempts to retrieve the URLs in argv.
*************************************/
{	int i;

	for (i = 1; i < argc; i ++)
		if (argv[i][0] != '-')
			InsertURL(argv[i], RECURSE);
}


void Stdin ()
/**************************************
**	void Stdin (URLp)
**
** Post: Attempts to retrieve the URLs from STDIN.
** NOTE: currently doesn't support FORK!
*************************************/
{	char *buffer;
	int result;

	buffer = malloc(4096);

	printf("Waiting for input...\n");

	result = scanf("%s", buffer);
	while(result != EOF)
	{	InsertURL(buffer, RECURSE);
		buffer = malloc(4096);
		result = scanf("%s", buffer);
	}
}


void File ()
/**************************************
**	void File (URLp)
**
** Post: Attempts to retrieve the URLs from greed.in.
** NOTE: currently doesn't support FORK!
*************************************/
{	FILE *GD;
	int i;
	char *buffer;

	buffer = malloc(4096);
	GD = fopen("greed.in", "r");
	i = 0;

	if (GD != NULL)
	{	while(!feof(GD))
		{	fread(buffer + i, 1, 1, GD);

			/* If we hit a space or return in the buffer then
			   we have a supposed URL! */
			if(buffer[i] == ' ' || buffer[i] == '\n')
			{	buffer[i] = '\0';
				InsertURL(buffer, RECURSE);
				buffer = malloc(4096);
				i = 0;
			}
			else
				i++;
		}
		unlink("greed.in");
		fclose(GD);
	}
}


void GRFile ()
/**************************************
**	void GRFile ()
**
** Post: Attempts to retrieve the URLs from a standard .GRX file.
** NOTE: currently doesn't support FORK!
*************************************/
{	FILE *GD = fopen(GR_FILE, "r");
	int i = 0;
	char *buffer = malloc(4096);

	if (GD != NULL)
	{	while(!feof(GD))
		{	fread(buffer + i, 1, 1, GD);

			/* If we hit a '\n' or return in the buffer and it doesn't
			   start with a '/', then we have a supposed URL! */
			if(buffer[0] != '/' && (buffer[i] == '\r' || buffer[i] == '\n'))
			{	buffer[i+1] = '\0';
				if (strstr(buffer, "URL: ") != NULL &&
					strstr(buffer, "PageURL: ") == NULL)
				{	buffer[i] = '\0';
					InsertURL(buffer, RECURSE);
					buffer = malloc(4096);
					printf("**** %s ****", curr->URL.name);
				}
				else if (strstr(buffer, "PageURL: ") != NULL)
				{	buffer[i] = '\0';
					strcpy(REF, buffer + 15);
				}
				else if (buffer[0] == '\r' || buffer[0] == '\n')
					i = 0;
				i = 0;
			}
			else
			{	i++;
				if (buffer[i - 1] == '\r' || buffer[i - 1] == '\n')
					i = 0;
			}
		}
		fclose(GD);
	}
}


void StartDownload ()
{	int j;
	int status = 0;
	pid_t childpid = 0;

	curr = list->next;

	if (FORK)
	{	for (j = 1; j < FORK + 1 && !status && curr->next != NULL; j++)
		{	childpid = fork();
			if (childpid == 0)
			{	status = 1;
				curr = curr->next;
			}
		}
	}

	while (curr != NULL)
	{	if(Parse(&curr->URL))
			DownloadLoop(&curr->URL);
		else if(OUTPUT_LEVEL > 0)
			printf("Unable to parse %s", curr->URL.name);

		for (j = 0; j < FORK && curr->next != NULL; j++)
			curr = curr->next;
		curr = curr->next;
	}

	/* If it's the child, then it exits, else it waits until the other
		children have finished what they are doing */
	if (!childpid)
	{	if (CALL_ME && !FORK)
			MailNotifyFinish();
		if (OUTPUT_LEVEL > 0)
			printf("Exiting...\n");
		fflush(NULL);
		exit(EXIT_SUCCESS);
	}

	if (childpid == wait(&status) && OUTPUT_LEVEL > 0)
		printf("Waiting for children processes to finish downloading\n");

	while(childpid == wait(&status) && wait(&status) != -1)
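The listing builds its download queue as a singly linked list of NODE records, each holding a URLstruct that is declared in main.h (not included on this page). Below is a minimal, self-contained sketch of the same append-at-tail pattern used by InsertURL() and PrintList(); the url_t type is a simplified stand-in for the real URLstruct, and the example URLs are invented for illustration.

#include <stdio.h>
#include <stdlib.h>

/* Simplified stand-in for GREED's URLstruct (the real one lives in main.h). */
typedef struct
{
	char *name;
	int recurse;
} url_t;

typedef struct node
{
	url_t URL;
	struct node *next;
} node_t;

/* Append a URL at the tail, as InsertURL() does:
   walk to the last node, then hang a new node off it. */
static void insert_url (node_t *head, char *name, int recurse)
{
	node_t *ptr = head;

	while (ptr->next != NULL)
		ptr = ptr->next;

	ptr->next = malloc(sizeof(node_t));
	ptr = ptr->next;
	ptr->URL.name = name;
	ptr->URL.recurse = recurse;
	ptr->next = NULL;
}

int main (void)
{
	/* The list starts with an unused head node, as in main() above. */
	node_t *head = malloc(sizeof(node_t));
	node_t *ptr;

	head->next = NULL;
	insert_url(head, "http://example.com/a.tar.gz", 0);
	insert_url(head, "ftp://example.com/b.iso", 0);

	/* Walk from the first real node, as PrintList() does. */
	for (ptr = head->next; ptr != NULL; ptr = ptr->next)
		printf("%s\n", ptr->URL.name);

	return EXIT_SUCCESS;
}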
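StartDownload() spreads the queue across processes with fork(): when FORK is non-zero, the parent creates FORK children and every process then steps through the list in strides of FORK + 1. The sketch below demonstrates that kind of round-robin split in a self-contained way; it is not a line-for-line reproduction of GREED's logic (NWORKERS, items, and rank are invented for the example), and it only prints which process would take which item instead of downloading anything.

#include <stdio.h>
#include <stdlib.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <unistd.h>

#define NWORKERS 3	/* plays the role of GREED's FORK setting */

int main (void)
{
	const char *items[] = { "url0", "url1", "url2", "url3",
	                        "url4", "url5", "url6", "url7" };
	const int nitems = sizeof items / sizeof items[0];
	int rank = 0;	/* 0 = parent, 1..NWORKERS = children */
	int i;

	/* Parent forks NWORKERS children; each child remembers its rank
	   and does not fork further. */
	for (i = 1; i <= NWORKERS; i++)
	{
		pid_t pid = fork();
		if (pid == 0)
		{
			rank = i;
			break;
		}
	}

	/* Each process takes every (NWORKERS + 1)-th item, offset by its rank. */
	for (i = rank; i < nitems; i += NWORKERS + 1)
		printf("process %d (pid %d) would download %s\n",
		       rank, (int) getpid(), items[i]);

	if (rank != 0)
		return EXIT_SUCCESS;	/* children exit when their share is done */

	/* The parent reaps all children, as StartDownload() does with wait(). */
	while (wait(NULL) > 0)
		;
	return EXIT_SUCCESS;
}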
