#include "crawler.h"


/*
 * Image-crawler driver.
 *
 * usage: <prog> <URL> <PAGE>
 *
 * Downloads <PAGE> from <URL> into "webpage.html", scans it for image
 * URLs matching `pattern` (search_imageurl appends the matches to
 * FILE_URL), then re-reads FILE_URL line by line and downloads each
 * image into ./images/.
 *
 * Returns 0 on success, EXIT_FAILURE on any setup/I-O error.
 */
int main(int argc, char **argv)
{
	/* "\\." so the regex engine sees a literal dot; the original "\."
	 * is an invalid C escape that collapses to "." and would match any
	 * character before the extension. */
	const char pattern[] = "//[^\"]*\\.(jpg|png|gif|bmp)";
	const char page[] = "webpage.html";

	/* Bug fix: the original printed usage but fell through and
	 * dereferenced argv[1]/argv[2] anyway. */
	if (argc != 3) {
		fprintf(stderr, "usage:<URL> <PAGE>\n");
		return EXIT_FAILURE;
	}

	char *buf = malloc(FILE_SIZE);
	if (!buf) {
		fprintf(stderr, "malloc failed!\n");
		return EXIT_FAILURE;
	}

	/* Fetch the page itself. */
	crawler(argv[1], argv[2], page, "a");

	FILE *fpr = fopen(page, "r");
	if (!fpr) {
		fprintf(stderr, "file open error!\n");
		free(buf);
		return EXIT_FAILURE;	/* bug fix: exit(0) reported success */
	}

	FILE *fpw = fopen(FILE_URL, "a");
	if (!fpw) {			/* bug fix: fpw was never checked */
		fprintf(stderr, "file open error!\n");
		fclose(fpr);
		free(buf);
		return EXIT_FAILURE;
	}

	size_t length = fread(buf, sizeof(char), FILE_SIZE, fpr);
	if (!length) {
		fprintf(stderr, "file read failed!\n");
		fclose(fpr);
		fclose(fpw);
		free(buf);
		return EXIT_FAILURE;
	}

	/* NOTE(review): even with printf debugging inside, this sometimes
	 * prints only ": Success" and nothing else — still unexplained
	 * (translated from the original comment). */
	search_imageurl(buf, pattern, fpw);

	/* fcloseall() is non-standard (GNU-only); close explicitly. */
	fclose(fpr);
	fclose(fpw);

	fpr = fopen(FILE_URL, "r");
	if (!fpr) {			/* bug fix: reopen was never checked */
		fprintf(stderr, "file open error!\n");
		free(buf);
		return EXIT_FAILURE;
	}

	/* Same 64-byte destination the original used, but built with a
	 * bounded snprintf instead of an unchecked strcpy/strcat — a long
	 * url->filename can no longer overflow it (truncates instead). */
	char filename[64];

	while (fgets(buf, FILE_SIZE, fpr)) {
		struct s_url *url = getDPF(buf);
		if (!url)		/* assumes getDPF may fail — TODO confirm */
			continue;

		snprintf(filename, sizeof filename, "./images/%s", url->filename);
		crawler(url->domainame, url->path, filename, "ab+");

		/* NOTE(review): the original free()d only the LAST iteration's
		 * url->domainame/url->path and never the struct or filename, so
		 * every url leaks.  Ownership of getDPF's result is not visible
		 * here; once its contract is confirmed, free the struct and its
		 * fields per iteration.  Freeing nothing is the safe interim
		 * choice (a leak, not a potential invalid free). */
	}

	free(buf);
	fclose(fpr);

	return 0;
}