#include "download.h"
#include "parser.h"

/*
 * Persist a downloaded page body to a numbered file "<count>.html" in the
 * current directory.  Increments the shared page counter even when the
 * write fails (keeps numbering monotonic).  Failures are logged via
 * debug() and otherwise ignored -- the crawler keeps going.
 *
 * Assumes conn->buffer is NUL-terminated by the caller (dl_thread writes
 * the terminator before calling us) -- fputs stops at the first '\0'.
 */
void save_file(global_t *global, conn_t *conn)
{
	FILE 	*fp;
	char 	fname[20];

	/* snprintf instead of sprintf: guarantees fname can never overflow,
	 * even for extreme counter values. */
	snprintf(fname, sizeof(fname), "%d.html", ++(global->count));
	fp = fopen(fname, "w");
	if (fp == NULL) return;
	if (fputs(conn->buffer, fp) == EOF) {
		debug("保存失败...\n");
	}
	/* fclose flushes buffered output; an error here means the file on
	 * disk is incomplete, so report it like a failed write. */
	if (fclose(fp) == EOF) {
		debug("保存失败...\n");
	}
}

/*
 * Read the HTTP response header from conn->sockfd into conn->header and
 * extract the Content-Length into conn->length (0 when absent, on a 404,
 * or when the connection dies).  Sets conn->header_finish = 1 in every
 * exit path so the caller's state machine always advances.
 *
 * NOTE(review): the header copy into conn->header is unbounded -- assumes
 * conn->header is large enough for any server response; verify the buffer
 * size at the allocation site.
 */
void get_header(global_t *global, conn_t *conn)
{
	char				*p, *q, s[1], len_str[50];
	int					nread, pre;

	/* Read one byte at a time so we stop exactly at the blank line and
	 * never consume any of the body. */
	pre = 0;
	p = conn->header;
	for (;;) {
		nread = read(conn->sockfd, s, 1);
		if (nread <= 0) {
			/* 0 = peer closed, <0 = read error.  The original only
			 * handled 0; on -1 it stored an indeterminate byte and
			 * could loop forever.  Either way the header is unusable. */
			debug("读取0字节 %s\n", conn->url);
			conn->length = 0;
			conn->header_finish = 1;
			return;
		}
		*p++ = *s;
		/* Header ends at the first empty line: "\n" (or "\r\n")
		 * immediately following a previous newline.  '\r' is ignored
		 * so both bare-LF and CRLF servers work. */
		if (*s == '\r') continue;
		else if (*s == '\n') {
			if (pre == 1) {
				*p = '\0';
				break;
			}
			pre = 1;
		}
		else {
			pre = 0;
		}
	}
	debug("%s\n\n%s", conn->url, conn->header);

	/* Parse "Content-Length: <digits>\r".  Copy is bounded by len_str
	 * and stops at end-of-string so a malformed header (no '\r') can
	 * no longer overrun the stack buffer. */
	p = strstr(conn->header, "Content-Length: ");
	if (p) {
		p += strlen("Content-Length: ");
		for (q = len_str;
		     *p != '\r' && *p != '\n' && *p != '\0' &&
		     q < len_str + sizeof(len_str) - 1;
		     p++, q++) {
			*q = *p;
		}
		*q = '\0';
		conn->length = atoi(len_str);
	} else {
		conn->length = 0;
	}
	/* Crude 404 detection: matches "404" anywhere in the header.
	 * NOTE(review): this also triggers on e.g. a "404" inside an ETag;
	 * parsing the status line would be stricter. */
	if (strstr(conn->header, "404")) {
		conn->length = 0;
	}
	conn->header_finish = 1;
}

/*
 * Downloader thread main loop.  Waits on global->dsem for work, then
 * select()s across every connection in global->dl_list, reads headers
 * and bodies, saves finished pages, extracts their links, and recycles
 * the conn object back onto global->conn_queue (signalled via csem).
 *
 * Semaphore protocol: dsem counts available download slots; one is
 * consumed per loop iteration and re-posted when nothing finished, and
 * extra completions beyond the first each consume one more (keeping the
 * count in step with dl_list shrinking by nfinish entries).
 */
void *dl_thread(void *arg)
{
	char				*p;
	int					n, maxfd, i, err, nfinish;
	global_t			*global;
	conn_t				*conn;
	struct timeval		tv;

	global = (global_t *)arg;
	for (;;) {
		printf("wait dl conn...dl's size = %d, conn's size = %d, url size = %d\n", 
				global->dl_list->length, q_size(global->conn_queue), q_size(global->url_queue));
		sem_wait(&global->dsem);
		debug("减少一个dl conn...\n");

		/* Rebuild the read set from the current connection list. */
		maxfd = -1;
		FD_ZERO(&global->rset);
		for (i = 0; i < global->dl_list->length; i++) {
			conn = (conn_t *)global->dl_list->elem[i];
			maxfd = MAX(conn->sockfd, maxfd);
			FD_SET(conn->sockfd, &global->rset);
		}

		/* select() on Linux modifies the timeout argument, so tv must
		 * be re-initialized before every call -- the original set it
		 * once, degrading later timeouts toward zero (busy spin). */
		tv.tv_sec = 3;
		tv.tv_usec = 0;
		debug("maxfd = %d, 开始select\n", maxfd);
		err = select(maxfd+1, &global->rset, NULL, NULL, &tv);
		debug("err = %d\n", err);
		if (err < 1) {
			/* Timeout or error: return the slot and back off. */
			sleep(3);
			sem_post(&global->dsem);
			continue;
		}
		debug("select结束\n");

		nfinish = 0;
		for (i = 0; i < global->dl_list->length; i++) {
			conn = (conn_t *)global->dl_list->elem[i];
			if (FD_ISSET(conn->sockfd, &global->rset)) {
				/* Phase 1: the HTTP header, read byte-by-byte. */
				if (!conn->header_finish) {
					get_header(global, conn);
					continue;
				}
				/* Phase 2: the body. */
				debug("nread = %d, length = %d\n", conn->nread, conn->length);
				if (conn->length > 0) {
					/* NOTE(review): assumes conn->buffer has at least
					 * length + 8192 bytes of room -- verify at the
					 * allocation site. */
					p = conn->buffer+conn->nread;
					n = read(conn->sockfd, p, 8192);
					/* Only count bytes actually read; the original
					 * added n unconditionally, so a -1 error return
					 * corrupted nread. */
					if (n > 0) {
						conn->nread += n;
						if (conn->nread < conn->length) {
							continue;
						}
					}
					/* n <= 0 (EOF/error) or body complete: finalize. */
					p = conn->buffer+conn->nread;
					*(p) = '\0';
					save_file(global, conn);
					/* Extract every link in the page for the crawler. */
					parse_html(global, conn);
				} else {
					debug("%s Not Found #####\n", conn->url);
				}
				debug("dl size = %d, conn size = %d, nfinish = %d\n", global->dl_list->length, q_size(global->conn_queue), nfinish);
				free_conn(global, conn);
				list_remove(global->dl_list, conn);
				/* Step back so the element shifted into slot i is not
				 * skipped on the next iteration.  (Assumes list_remove
				 * compacts the array -- TODO confirm in list impl.) */
				i--;
				if (++nfinish > 1) {
					debug("download完成，减少一个dl conn...\n");
					sem_wait(&global->dsem);
				}
				q_push(global->conn_queue, conn);
				sem_post(&global->csem);
			}
		}
		if (nfinish == 0) {
			/* Nothing completed this round: give the slot back. */
			sem_post(&global->dsem);
			debug("归还一个dl conn...\n");
		}
	}

	return (void *)0;
}
