﻿using System.Collections.Generic;
using System.Diagnostics;
using MyCompany.WebCrawler.Interfaces;
using MyCompany.WebCrawler.Repositories;

namespace MyCompany.WebCrawler.Tasks.Tasks
{
    /// <summary>
    /// Crawler task that downloads the page at <see cref="Url"/>, extracts its links,
    /// persists the previously-unseen ones, and enqueues follow-up tasks for each link found.
    /// </summary>
    public class GetLinksFromTask : ITask
    {
        private readonly IDownloader downloader;
        private readonly IRepository repository;
        private readonly TasksFactory factory;

        /// <summary>
        /// URL of the page to crawl. Must be set before <see cref="Execute"/> is called;
        /// when empty, <see cref="Execute"/> is a traced no-op.
        /// </summary>
        public string Url { get; set; }

        public GetLinksFromTask(TasksFactory factory, IDownloader downloader, IRepository repository)
        {
            this.downloader = downloader;
            this.repository = repository;
            this.factory = factory;
        }

        /// <summary>
        /// Runs one crawl step: bails out (with a trace message) when no URL is set,
        /// otherwise loads the page's links and schedules follow-up tasks for them.
        /// </summary>
        public void Execute()
        {
            if (string.IsNullOrEmpty(Url))
            {
                Trace.WriteLine("No URL for crawl!");
                return;
            }

            var links = ExecuteLoad();
            CreateNewTasks(links);
        }

        /// <summary>
        /// Enqueues a GetLinks task plus a ShowNewLink task for every discovered link.
        /// </summary>
        /// <param name="links">Links discovered on the current page; null is treated as empty.</param>
        public void CreateNewTasks(List<string> links)
        {
            // Robustness fix: this is a public method and the original dereferenced the
            // list unconditionally; treat null as "nothing to schedule".
            if (links == null)
            {
                return;
            }

            foreach (var url in links)
            {
                repository.Queue.Enqueue(factory.CreateGetLinks(url));
                repository.Queue.Enqueue(factory.CreateShowNewLinkTask());
            }
        }

        /// <summary>
        /// Downloads <see cref="Url"/>, filters out links already known to the repository,
        /// saves the remaining ones, and returns them.
        /// </summary>
        /// <returns>The newly discovered (deduplicated) links.</returns>
        public List<string> ExecuteLoad()
        {
            Trace.WriteLine("Download from: " + Url);

            var links = downloader.GetLinksFrom(Url);
            // NOTE(review): "RemoveDuplicats" is a typo declared on IRepository; renaming it
            // here would be a breaking interface change, so the call is kept as-is.
            links = repository.RemoveDuplicats(links);
            links.ForEach(l => repository.Save(l));
            return links;
        }

        /// <summary>
        /// Returns a human-readable description of this task.
        /// Fix: the original wrote a misleading "CreateGetLink" trace message — a side
        /// effect ToString must not have, since debuggers and string interpolation call it
        /// implicitly — and returned only the type name via base.ToString().
        /// </summary>
        public override string ToString()
        {
            return "GetLinksFromTask: " + Url;
        }
    }
}