#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <curl/curl.h>
#include "parser.h"
#include "hash.h"

#define URL_SIZE 1024
#define FILENAME_SIZE 128

/*
 * libcurl write callback: append the received chunk to the FILE* passed
 * via CURLOPT_WRITEDATA.  Returns the number of items written; returning
 * fewer than nmemb tells libcurl to abort the transfer.
 */
static size_t write_data (void *ptr, size_t size, size_t nmemb, void *stream)
{
  /* Keep the size_t return from fwrite; the old int truncated it. */
  return fwrite(ptr, size, nmemb, (FILE *)stream);
}

/*
 * Read the next whitespace-delimited URL from url_list into a freshly
 * malloc'd buffer of URL_SIZE bytes.  Caller owns the returned buffer
 * and must free() it.  Returns NULL on allocation failure; returns an
 * empty string if the file has no more tokens.
 */
char *get_next_url (FILE *url_list)
{
  char *result = malloc(URL_SIZE);
  if (result == NULL)
    return NULL;
  result[0] = '\0';
  /* Field width 1023 == URL_SIZE - 1: bounds the read so a long line
   * cannot overflow the buffer; fscanf NUL-terminates on success.
   * Checking the return avoids using uninitialized memory at EOF. */
  if (fscanf(url_list, "%1023s", result) != 1)
    result[0] = '\0';
  return result;
}

/*
 * Count '\n' characters in the stream, then CLOSE it (callers rely on
 * this and reopen the file afterwards).  A final line without a trailing
 * newline is not counted, matching the original behavior.
 */
int lines (FILE *file)
{
  int c;          /* int, not char: fgetc returns int so EOF is distinct */
  int count = 0;  /* renamed: the old local shadowed the function name */
  while ((c = fgetc(file)) != EOF) {
    if (c == '\n')
      count++;
  }
  fclose(file);
  return count;
}

/*
 * Read `size` parsed URLs from `parsed`, assign each a hash index, and
 * emit the adjacency row for page `index` into `adj_list` (terminated
 * with "-1\n").  URLs not yet in the hash are appended to `list` and
 * counted in the return value; *out accumulates the distinct out-links
 * of this page.  Returns the number of newly discovered URLs.
 */
int update (FILE *list, FILE *parsed, FILE *adj_list, int size, int index, int max_pages, int *out)
{
  int i, link;
  int updated = 0;
  short *check;          /* dedup bitmap: check[link] != 0 => already emitted */
  char buffer[URL_SIZE];

  check = calloc(max_pages, sizeof(short));
  if (check == NULL) {
    /* Allocation failed: emit an empty row so the file stays parseable. */
    fprintf(adj_list, "%d: -1\n", index);
    return updated;
  }

  fprintf(adj_list, "%d: ", index);
  for (i = 0; i < size; i++) {
    /* Width 1023 == URL_SIZE - 1 bounds the read (no overflow); stop
     * early if the parsed file has fewer tokens than promised. */
    if (fscanf(parsed, "%1023s", buffer) != 1)
      break;
    link = hash_out(buffer);
    if (link == -1) {
      if (hash_full()) {
        fprintf(adj_list, "-1\n");
        free(check);     /* was leaked on this early return */
        return updated;
      }
      link = hash_in(buffer);
      fprintf(list, "%s\n", buffer);
      updated++;
    }
    if (check[link] == 0) {
      fprintf(adj_list, "%d ", link);
      check[link] = 1;
      (*out)++;
    }
  }
  fprintf(adj_list, "-1\n");
  free(check);
  return updated;
}

/*
 * Crawler driver.  Usage: crawler <url_list_file> <max_pages>.
 * Fetches each URL with libcurl into body.html, parses it for links,
 * and writes nodes.txt (node metadata) and adj_list.txt (adjacency
 * rows; the placeholder first line is rewritten with the final node
 * count at the end).
 */
int main(int argc, char *argv[])
{
  CURL *curl_handle;
  CURLcode res;
  static const char *body = "body";
  FILE *bodyfile, *url_list_a, *url_list, *parsed_urls_list, *adj_list, *nodes;
  char *bodyfilename, *URL, *domain, *title;
  int index, url_num, new_urls, list_size, max_pages, out;

  /* argc < 3 also covers argc == 1 (the old `argc == 2` test let
   * a no-argument run read past argv and crash in atoi). */
  if (argc < 3) {
    printf("Incorrect usage!\nMissing arguments!\n");
    return -1;
  }
  if (argc > 3) {
    printf("Incorrect usage!\nToo many arguments!\n");
    return -2;
  }
  max_pages = atoi(argv[2]);

  // Memory allocation
  bodyfilename = malloc(FILENAME_SIZE);
  domain = malloc(URL_SIZE);
  if (bodyfilename == NULL || domain == NULL) {
    printf("Out of memory\n");
    free(bodyfilename);
    free(domain);
    return -4;
  }

  // Open output files
  nodes = fopen("nodes.txt", "w");
  adj_list = fopen("adj_list.txt", "w");
  if (nodes == NULL || adj_list == NULL) {
    printf("Cannot create output files\n");
    return -4;
  }
  /* Placeholder line: rewritten with the final node count after the loop. */
  fprintf(adj_list, "          \n");

  // Open URL list (lines() closes the stream, so reopen afterwards)
  url_list = fopen(argv[1], "r");
  if (url_list == NULL) {
    printf("Cannot open given url_list\n");
    return -3;
  }
  url_num = lines(url_list);
  url_list = fopen(argv[1], "r+");
  if (url_list == NULL) {
    printf("Cannot reopen given url_list\n");
    return -3;
  }

  // Initialize CURL session
  curl_global_init(CURL_GLOBAL_ALL);
  curl_handle = curl_easy_init();
  hash_setup(max_pages);

  for (index = 0; index < url_num && index < max_pages; index++) {
    // Get next URL from the list
    URL = get_next_url(url_list);
    fprintf(nodes, "%d\n", index);
    if (index == 0) {
      /* First URL defines the crawl domain. */
      strcpy(domain, URL);
      hash_in(domain);
      parser_init(domain);
      printf("Domain: %s\n", domain);
      fprintf(nodes, "%s\n", domain);
    }
    else {
      printf("URL[%d] = %s\n", index, URL);
      fprintf(nodes, "%s\n", URL);
    }

    curl_easy_setopt(curl_handle, CURLOPT_FOLLOWLOCATION, 1L);
    curl_easy_setopt(curl_handle, CURLOPT_WRITEFUNCTION, write_data);

    // Open results file (snprintf bounds and NUL-terminates)
    snprintf(bodyfilename, FILENAME_SIZE, "%s.html", body);
    bodyfile = fopen(bodyfilename, "w");
    if (bodyfile == NULL) {
      printf("Could not create body file!\n");
      free(URL);
      curl_easy_cleanup(curl_handle);
      return -1;
    }

    curl_easy_setopt(curl_handle, CURLOPT_WRITEDATA, bodyfile);
    curl_easy_setopt(curl_handle, CURLOPT_URL, URL);
    res = curl_easy_perform(curl_handle);
    /* Close on both paths: the old code leaked bodyfile on failure. */
    fclose(bodyfile);
    if (res != CURLE_OK) {
      printf("CURL failed: %s\n\n", curl_easy_strerror(res));
      fprintf(adj_list, "%d: -1\n", index);
      free(URL);   /* was leaked by the old `continue` */
      continue;
    }

    // Parse bodyfile
    list_size = parse(bodyfilename, "temp.txt", URL);
    printf("Parsed %d page(s)\n", list_size);
    title = get_title();
    fprintf(nodes, "%s\n", title);

    // Update url_list with newly discovered pages
    parsed_urls_list = fopen("temp.txt", "r");
    url_list_a = fopen(argv[1], "a+");
    if (parsed_urls_list == NULL || url_list_a == NULL) {
      printf("Could not open temp/url list files!\n");
      fprintf(adj_list, "%d: -1\n", index);
      if (parsed_urls_list != NULL) fclose(parsed_urls_list);
      if (url_list_a != NULL) fclose(url_list_a);
      free(URL);
      continue;
    }
    out = 0;
    new_urls = update(url_list_a, parsed_urls_list, adj_list, list_size, index, max_pages, &out);
    url_num += new_urls;
    fprintf(nodes, "%d\n\n", out);
    printf("%d new page(s).\n\n", new_urls);

    // Close files and release the URL buffer.
    // free(), not curl_free(): get_next_url allocates with malloc.
    free(URL);
    fclose(parsed_urls_list);
    fclose(url_list_a);
  }

  // Terminal row, then rewrite the placeholder first line with the node count
  fprintf(adj_list, "%d:", index);
  rewind(adj_list);
  fprintf(adj_list, "%d", index);

  fclose(url_list);
  fclose(adj_list);
  fclose(nodes);
  hash_cleanup();
  parser_cleanup();
  free(bodyfilename);
  free(domain);
  curl_easy_cleanup(curl_handle);
  curl_global_cleanup();   /* pair with curl_global_init */
  return 0;
}

