#include "crawler.h"

/*
 * Extract the file-name component of a URL: the text after the last '/'.
 *
 * Trailing CR/LF characters (a URL read from a line-based source such as
 * "...foo/index.html\r\n") are stripped BEFORE locating the last '/', which
 * fixes the long-standing "/\r\n" bug noted in the old implementation.
 *
 * Returns a freshly malloc'd string the caller must free().  A URL that
 * ends in '/' yields an empty string "".  Returns NULL (with a message on
 * stderr) when url is NULL, contains no '/', or allocation fails.
 */
char* getFilename(const char *url)
{
	if(!url)
	{
		fprintf(stderr, "url is NULL!\n");
		return NULL;
	}

	/* Length of the URL with any trailing "\r\n" (or lone CR/LF) removed. */
	size_t len = strlen(url);

	while(len > 0 && (url[len-1] == '\n' || url[len-1] == '\r'))
	{
		--len;
	}

	/* Walk back to the character just after the last '/' in the trimmed span. */
	size_t start = len;

	while(start > 0 && url[start-1] != '/')
	{
		--start;
	}

	if(start == 0)	/* no '/' anywhere: there is no name component */
	{
		fprintf(stderr, "No name!\n");
		return NULL;
	}

	/* Allocate exactly what is needed (old code used a fixed 64-byte
	   buffer and an unbounded strcpy — a heap overflow for long names). */
	size_t namelen = len - start;
	char *filename = (char*)malloc(namelen + 1);

	if(!filename)
	{
		fprintf(stderr, "out of memory!\n");
		return NULL;
	}

	memcpy(filename, url + start, namelen);
	filename[namelen] = '\0';

	return filename;
}


/*
 * Split a URL into Domain, Path and Filename (hence "DPF").
 *
 * Example: "http://host.com/dir/a.html" ->
 *     domainame = "host.com", path = "/dir/a.html", filename = "a.html".
 *
 * A missing scheme ("host.com/x") is tolerated; a URL with no path
 * component gets path "/".  Trailing CR/LF is stripped from every part.
 *
 * Returns a malloc'd struct s_url (caller frees the struct and its
 * domainame/path/filename members), or NULL on bad input / allocation
 * failure.  Note: filename may itself be NULL (see getFilename).
 */
struct s_url* getDPF(const char *url)
{
	if(!url)
	{
		fprintf(stderr, "url is NULL!\n");
		return NULL;
	}

	/* Skip the scheme separator ("http://") when present; old code
	   dereferenced a NULL strstr() result when it was absent. */
	const char *host = strstr(url, "//");
	host = host ? host + 2 : url;

	/* The host ends at the first '/'; with no '/' the whole remainder
	   is the host (old code computed a negative length here). */
	const char *pathstart = strchr(host, '/');
	size_t hostlen = pathstart ? (size_t)(pathstart - host) : strlen(host);

	/* Trim a trailing CR/LF off the host (URL read from a line). */
	while(hostlen > 0 && (host[hostlen-1] == '\n' || host[hostlen-1] == '\r'))
	{
		--hostlen;
	}

	/* +1 for the terminator the old malloc(length)/strncpy pair forgot. */
	char *domainame = (char*)malloc(hostlen + 1);

	if(!domainame)
	{
		fprintf(stderr, "out of memory!\n");
		return NULL;
	}

	memcpy(domainame, host, hostlen);
	domainame[hostlen] = '\0';

	/* Path: everything from the first '/' on, minus trailing CR *and* LF
	   (old code dropped only the '\n', leaving a stray '\r').  Sized
	   exactly instead of a fixed 256-byte buffer + unbounded strcpy. */
	const char *rawpath = pathstart ? pathstart : "/";
	size_t pathlen = strlen(rawpath);

	while(pathlen > 0 && (rawpath[pathlen-1] == '\n' || rawpath[pathlen-1] == '\r'))
	{
		--pathlen;
	}

	char *path = (char*)malloc(pathlen + 1);

	if(!path)
	{
		fprintf(stderr, "out of memory!\n");
		free(domainame);
		return NULL;
	}

	memcpy(path, rawpath, pathlen);
	path[pathlen] = '\0';

	struct s_url *surl = (struct s_url*)malloc(sizeof(struct s_url));

	if(!surl)
	{
		fprintf(stderr, "out of memory!\n");
		free(domainame);
		free(path);
		return NULL;
	}

	surl->domainame = domainame;
	surl->path = path;
	surl->filename = getFilename(url);

	return surl;
}

