<?php


namespace App\Bx;


use App\Job\IJob;
use App\Utils\HttpUtil;
use App\Utils\Logger;
use Goutte\Client;
use Symfony\Component\DomCrawler\Crawler;

class BxCrawlJob extends IJob
{

    /** @var string List-page URL to crawl. */
    protected $url;

    /** @var BxTopic Topic whose queue name receives the resulting save job. */
    protected $topic;

    /**
     * @param string  $url   list-page URL to scrape
     * @param BxTopic $topic target topic; its name is used as the queue name
     */
    public function __construct($url, BxTopic $topic)
    {
        $this->url = $url;
        $this->topic = $topic;
    }

    /**
     * Crawl the list page, extract per-item metadata from each `div.tc_nr li`
     * node, enrich each item with play/download URLs from its detail page,
     * and dispatch a BxSaveJob with the collected records.
     *
     * Request failures on the list page are logged and abort the job.
     */
    public function handle()
    {
        $client = new Client();
        $client->setClient((new HttpUtil())->getClient());
        try {
            $crawler = $client->request('GET', $this->url);
        } catch (\Exception $e) {
            Logger::getLogger('request')->error($e->getMessage());
            return;
        }

        $datas = [];
        $crawler->filter('div.tc_nr li')->each(function (Crawler $node) use (&$datas, $client) {
            $host = 'http://8xxka.com';

            $data = [];
            $data['href'] = $node->filter('div.t_p a')->first()->attr('href');
            $data['duration'] = $node->filter('div.t_p span')->first()->text();
            $data['origin_image'] = $node->filter('div.t_p img')->first()->attr('data-original');
            $data['title'] = $node->filter('h3')->text();
            $data['date_time'] = $node->filter('div.w_z span')->first()->text();

            // The numeric item id is embedded in the href; skip items where it
            // is absent instead of reading an undefined $ids[0].
            if (!preg_match('/(\d+)/', $data['href'], $ids)) {
                Logger::getLogger('request')->error('no numeric id in href: ' . $data['href']);
                return;
            }
            $data['id'] = $ids[0];

            try {
                $data = array_merge($data, $this->getPlayUrl($client, $host . $data['href']));
            } catch (\Exception $e) {
                // Best-effort: keep the list-level data even when the detail
                // page fails, but log the failure instead of swallowing it.
                Logger::getLogger('request')->error($e->getMessage());
            }
            $datas[] = $data;
        });

        // Nothing scraped — don't enqueue an empty save job.
        if ($datas === []) {
            return;
        }
        dispatch(new BxSaveJob($datas))->onQueue($this->topic->name);
    }

    /**
     * Fetch a detail page and extract the player path and download link.
     *
     * @param Client $client     shared Goutte client (reuses cookies/transport)
     * @param string $detail_url absolute URL of the item's detail page
     * @return array{address: string, download_address: string}
     * @throws \Exception when the request fails or the selectors match nothing
     */
    protected function getPlayUrl(Client $client, $detail_url)
    {
        $crawler = $client->request('GET', $detail_url);

        $data = [];
        $data['address'] = $crawler->filter('#vpath')->first()->text();
        $data['download_address'] = $crawler->filter('div.sp_kj > .x_z > a')->first()->attr('href');
        return $data;
    }
}