/**
 * @Author: Zerone
 * @Date: 2016.12.20
 * <p>
 * By analyzing requests and responses with Fiddler, I obtained a series of URLs
 * that respond with JSON-format packets. Parsing that data with org.json yields
 * each video's attributes and the total number of videos in each category,
 * from which the maximum number of pages can be calculated. After crawling the
 * last page of information, the results are inserted into the database.
 * </p>
 */

package crawler;

import database.DataBase;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;
import org.json.JSONArray;
import org.json.JSONObject;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import video.Video;

import java.net.HttpURLConnection;
import java.net.URL;
import java.util.HashSet;

public class Crawler extends Thread {
    // Category listing URL; its "pn=" (page number) parameter is rewritten before each request.
    private String url;
    private int currentPage = 1;
    // Recomputed from the first response's "page" object; stays 1 until then so the loop runs once.
    private int maxPage = 1;
    // Category id, parsed from the URL's "tid=" parameter.
    private int tid;
    // Videos collected across every page of this category.
    private final HashSet<Video> videos = new HashSet<>();

    // Only videos with strictly more favorites than this are kept.
    private static final int MIN_FAVORITES = 100;

    // Apache HttpClient is thread-safe, so a single instance is shared by all crawler threads.
    private static final HttpClient httpClient = HttpClients.createDefault();
    private static final String PREFIX = "http://www.flvcd.com/parse.php?format=&kw=http%3A%2F%2Fwww.bilibili.com%2Fvideo%2Fav";

    /**
     * @param initURL category listing URL; must contain "tid=", "pn=" and a
     *                trailing parameter after "pn=" (see {@link #updateUrl()}).
     */
    public Crawler(String initURL) {
        this.url = initURL;
    }

    /**
     * Rewrite the "pn=" page-number parameter in the url to {@code currentPage}.
     * Assumes "pn=" is followed by at least one more "&amp;"-separated parameter.
     */
    private void updateUrl() {
        int p = url.lastIndexOf("pn=");
        String begin = url.substring(0, p + 3);
        String end = url.substring(url.lastIndexOf("&"));
        url = begin + currentPage + end;
    }

    /**
     * Make a GET request and obtain the response body.
     *
     * @param url target url to request
     * @return response content as a String
     * @throws Exception if the request or entity consumption fails
     */
    private static String getResponse(String url) throws Exception {
        HttpGet req = new HttpGet(url);
        req.addHeader("Accept", "application/javascript, */*;q=0.8");
        req.addHeader("Accept-Encoding", "gzip, deflate,sdch");
        req.addHeader("Accept-Language", "zh-Hans-CN,zh-Hans;q=0.5");
        req.addHeader("Content-Type", "text/html; charset=UTF-8");
        req.addHeader("User-Agent", "Mozilla/5.0 (MSIE 9.0; Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.79 Safari/537.36 Edge/14.14393");

        HttpResponse rep = httpClient.execute(req);
        HttpEntity repEntity = rep.getEntity();
        return EntityUtils.toString(repEntity);
    }

    /**
     * Get the redirect target of a URL without following it.
     *
     * @param path download link that may redirect
     * @return the "Location" header value, or {@code null} if the response has none
     * @throws Exception if the connection cannot be opened
     */
    private static String getRedirectUrl(String path) throws Exception {
        HttpURLConnection conn = (HttpURLConnection) new URL(path).openConnection();
        conn.setInstanceFollowRedirects(false);
        conn.setConnectTimeout(5000);
        // A read timeout prevents the thread from hanging forever on an unresponsive host.
        conn.setReadTimeout(5000);
        try {
            return conn.getHeaderField("Location");
        } finally {
            // Release the underlying connection; the original leaked it.
            conn.disconnect();
        }
    }

    /**
     * Get the download link of a video via http://www.flvcd.com/.
     * Requests the pertinent url, parses the returned HTML, and extracts the
     * download link. If that link redirects, the real target is returned instead.
     *
     * @param aid the av number of the video to be downloaded
     * @return the direct download link
     * @throws Exception if the request fails, or (as a NullPointerException)
     *                   if the page contains no "a.link" element
     */
    public static String getDownloadLink(int aid) throws Exception {
        String url = PREFIX + aid + "%2F";
        String html = getResponse(url);

        Document doc = Jsoup.parse(html);
        // NOTE(review): first() is null when flvcd returns no "a.link" element
        // (e.g. parse failure page); the NPE then propagates via "throws Exception".
        String link = doc.select("a.link").first().attr("href");
        String realLink = getRedirectUrl(link);
        return realLink == null ? link : realLink;
    }

    // Derive the total number of pages from the "page" JSON object: ceil(count / size).
    private void calMaxPage(JSONObject page) {
        int count = page.getInt("count");
        int size = page.getInt("size");
        maxPage = (count + size - 1) / size;
    }

    // Parse the category id out of the "tid=" parameter of the url.
    private void calTid() {
        int begin = url.indexOf("tid=") + 4;
        int end = url.lastIndexOf("&pn=");
        tid = Integer.parseInt(url.substring(begin, end));
    }

    /**
     * Analyze the JSON content of one page and accumulate qualifying videos.
     *
     * @param content The json format response content, such as
     *                "jQuery...({"code":0,"data":{"archives":[...],"page":{"count":20788,"num":1,"size":20}},"message":""});"
     * @throws Exception if the content is not the expected JSONP/JSON shape
     */
    private void analyzeJSONResponse(String content) throws Exception {
        // Strip the JSONP callback wrapper: keep everything between the first "{" and the last "}".
        String temp = content.substring(content.indexOf("{"),
                content.lastIndexOf("}") + 1);

        JSONObject info = new JSONObject(temp);
        JSONObject data = info.getJSONObject("data");
        JSONArray archives = data.getJSONArray("archives");

        // Only the first page carries the information needed to size the crawl.
        if (maxPage == 1) {
            JSONObject page = data.getJSONObject("page");
            calMaxPage(page);
            calTid();
        }

        // Store only videos whose favorites exceed MIN_FAVORITES.
        // (The original comment said "greater than 0", but the code checked > 100.)
        for (int i = 0; i < archives.length(); ++i) {
            Video video = new Video(archives.getJSONObject(i));
            if (video.getFavorites() > MIN_FAVORITES) {
                videos.add(video);
            }
        }
    }

    /**
     * Crawl every page of the category, then persist the collected videos.
     * A per-page parse failure is logged and skipped; the crawl continues.
     */
    @Override
    public void run() {
        try {
            while (currentPage <= maxPage) {
                updateUrl();
                String content = getResponse(this.url);
                try {
                    analyzeJSONResponse(content);
                } catch (Exception ae) {
                    // Skip a malformed page rather than aborting the whole category.
                    System.out.println("catch" + tid + ": " + currentPage);
                    ae.printStackTrace();
                }
                currentPage++;
            }

        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // Save whatever was collected, even if the crawl aborted part-way.
            System.out.println(tid + ": " + videos.size() + " videos; begin to save data...");
            if (videos.size() > 0) {
                try {
                    DataBase.save(videos, tid);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }
}
