<?php
// Scraper entry script: harvests thread URLs from avbaidu.net forum listing
// pages and stores them (tag, category, url) in the sex8_html_url table,
// skipping URLs that are already present.

// Database connection settings (MySQL via PDO, UTF-8 connection charset).
$dsn = "mysql:host=localhost;dbname=bhdw";
$username = 'root';
$password = '123456';
$table = 'sex8_html_url';
$options = array(PDO::MYSQL_ATTR_INIT_COMMAND => "SET NAMES 'UTF8';");
$dbConnection = new PDO($dsn, $username, $password, $options);
// Prepared once, reused for every row: insert a new link / count duplicates.
$stmt = $dbConnection->prepare('INSERT INTO ' . $table . ' (tag,category,url) VALUES (:tag,:category,:url)');
$stmt2 = $dbConnection->prepare('SELECT COUNT(*) AS count FROM ' . $table . ' WHERE url=:url');
// HTTP stream context used by get_web_data(): plain GET with a 60s timeout.
$opts = array(
    'http' => array(
        'method' => "GET",
        'timeout' => 60,
    )
);

$context = stream_context_create($opts);

$isReGet = isset($argv[1]) ? $argv[1] : FALSE; // whether this is a re-crawl; if so, only the first N pages are fetched
$pages = isset($argv[2]) ? $argv[2] : 5; // page count used for a re-crawl (defaults to 5)
include 'HtmlParserModel.php';
// Forum board ids (fid) mapped to the tag stored with each harvested URL.
$fids = array(
    '153' => '美女玉乳',
    '226' => '制服诱惑',
);
$url = 'http://avbaidu.net/thread-htm-fid-';
foreach ($fids as $fid => $tag) {
    $t1 = time();
    if ($isReGet) {
        $pageTotal = $pages; // re-crawl: use the fixed page count instead of probing the site
        echo '重复获取帖子列表，获取' . $pageTotal . "页\n";
    } else {
        // First crawl: read the total page count from the board's first page.
        $pageTotal = getTotalPage($url . $fid . '.html', $tag);
    }
    for ($i = 1; $i <= $pageTotal; $i++) {// NOTE(review): the site appears to serve at most 600 pages; requests beyond that repeat page 600
        getLinks($url . $fid . '-page-' . $i . '.html', $tag, $i, $pageTotal);
    }

    echo str_repeat('=', 80) . "\n";
    echo '[耗时信息]共耗时' . (time() - $t1) . '秒' . "\n";
}

// Fetch one listing page and extract thread info (board tag, category,
// thread URL); inserts each URL into the DB unless it already exists.
// $url       listing-page URL to fetch
// $tag       board tag stored alongside each link
// $page      current page number (progress output only)
// $pageTotal total page count (progress output only)
function getLinks($url, $tag, $page, $pageTotal) {
    global $stmt, $stmt2; // prepared INSERT and duplicate-check statements from the main script
    @$webData = get_web_data($url);
    if ($webData) {
        $html = new HtmlParserModel($webData);
        // Each 'a.f14' anchor is the category link; the thread link ('a.subject')
        // lives in the same parent node, so re-parse that parent as a mini document.
        foreach ($html->find2('a.f14') as $index => $category) {
            $a = new HtmlParserModel('<html><body>' . $category->parent->value . '</body></html>');
            $data = array(':tag' => $tag, ':category' => $category->getPlainText(), ':url' => $a->find('a.subject', 0)->attribute['href']);
            // Strip the '-fpage-N' fragment so the same thread reached from
            // different listing pages normalises to one canonical URL.
            // NOTE(review): truthy strpos() check — would misbehave if '-fpage-'
            // ever appeared at position 0, which thread URLs here never do.
            if (strpos($data[':url'], '-fpage-')) {
                $data[':url'] = str_replace(substr($data[':url'], strpos($data[':url'], '-fpage-'), (strpos($data[':url'], '.html') - strpos($data[':url'], '-fpage-'))), '', $data[':url']);
            }
            // Skip URLs already stored; otherwise insert and report progress.
            $stmt2->execute(array(':url' => $data[':url']));
            $count = $stmt2->fetch(PDO::FETCH_ASSOC);
            if ($count['count']) {
                echo '[插入失败]' . $data[':url'] . '已存在' . "\n";
            } else {
                $stmt->execute($data);
                echo '[插入成功]' . $tag . '|' . $page . '/' . $pageTotal . '|' . ($index + 1) . "\n";
            }
            // Release per-row objects eagerly (long-running CLI loop).
            $category = $a = $data = NULL;
        }
        $html = $tag = $url = NULL;
    } else {
        echo '[获取失败]' . $tag . '|' . $page . '/' . $pageTotal . ' 页获取失败' . "\n";
    }
    $webData = $pageTotal = NULL;
}

// Read the total page count of a forum board from its first listing page.
// The pagination widget ('span.pagesone' > 'span') renders "current/total";
// the number after '/' is the total. Returns 0 on any failure (fetch error,
// missing pagination node, or unexpected pagination text).
// $url board's first-page URL; append '-page-N' before '.html' for page N
// $tag board tag, used only in progress/error output
function getTotalPage($url, $tag) {
    @$webData = get_web_data($url);
    $pageTotal = 0;
    if ($webData) {
        $html = new HtmlParserModel($webData);
        // Look up the pagination node once (the original queried it twice).
        $pagesNode = $html->find('span.pagesone', 0);
        if (!method_exists($pagesNode, 'find')) {
            echo '[总页数获取失败]' . $tag . '|' . $url . "\n";
        } else {
            $pages = explode('/', $pagesNode->find('span', 0)->getPlainText());
            // Guard the index: without '/', $pages[1] is undefined and the
            // original raised a notice while silently yielding 0.
            $pageTotal = isset($pages[1]) ? (int) $pages[1] : 0;
            if ($pageTotal > 0) {
                echo '[总页数获取成功]' . $tag . '共' . $pageTotal . '页' . "\n";
            } else {
                echo '[总页数获取失败]' . $tag . '|' . $url . "\n";
            }
            $html = $webData = NULL;
        }
        return $pageTotal; // total pages; append '-page-2' to the URL for page 2
    }
    echo '[总页数获取失败]' . $tag . '|' . $url . "\n";
    return $pageTotal;
}


// Download a URL with up to three attempts, using the shared HTTP stream
// context configured in the main script (GET, 60s timeout).
// Returns the page body on success, or FALSE once all attempts fail.
function get_web_data($url) {
    global $context;
    $attemptsLeft = 3;
    while ($attemptsLeft-- > 0) {
        @$content = file_get_contents($url, FALSE, $context);
        if ($content) {
            return $content;
        }
    }
    return FALSE;
}
