package shell

import (
	"encoding/json"
	"fmt"
	"strconv"
	"time"

	"gitee.com/chsir/base-spider-engine/config"
	"gitee.com/chsir/base-spider-engine/models"
)

// 从web_pages表同步数据到影子表web_status
// AutoSyncPagesToStatus copies new rows from the 16 sharded web_pages_N
// tables into their matching web_status_N shadow tables. Each shard only
// inserts rows whose id is greater than the shadow table's current max id,
// so repeated runs sync just the delta. Errors on one shard are logged and
// the remaining shards are still processed.
func AutoSyncPagesToStatus() {
	start := time.Now()
	var synced int64
	db := config.DB_MYSQL

	for i := 0; i < 16; i++ {
		pageTable := "web_pages_" + strconv.Itoa(i)
		statusTable := "web_status_" + strconv.Itoa(i)

		// COALESCE(..., 0) covers an empty shadow table, where max(id) is NULL.
		sql := "INSERT INTO " + statusTable + " SELECT `id`,`url`,`host`,`craw_done`,`craw_time` FROM " + pageTable + " WHERE id > COALESCE((SELECT max(id) FROM " + statusTable + "), 0); "

		result := db.Exec(sql)
		if result.Error != nil {
			// Surface the failing shard instead of silently dropping the error.
			fmt.Printf("同步 %s 到 %s 失败: %v\n", pageTable, statusTable, result.Error)
			continue
		}
		synced += result.RowsAffected
	}

	useTime := int64(time.Since(start).Seconds())
	if synced > 0 {
		fmt.Printf("从 web_pages 表同步了 %d 条数据到 web_status 表, 共耗时：%d 秒\n", synced, useTime)
	}
}

// 从影子表web_status获取数据，放入redis的待爬列表里面
// GetStatusDataToRedis pulls un-crawled rows (craw_done = 0) from the 16
// sharded web_status tables and pushes them, JSON-encoded, onto the Redis
// pending-crawl list (needCrawList). For each shard the max id already
// copied is cached in a Redis key so the next run resumes after it; hosts
// present in the domain blacklist are excluded from the query.
func GetStatusDataToRedis() {
	const pageNum = 20 // rows fetched from each status shard per run
	redis := config.DB_REDIS
	mysql := config.DB_MYSQL

	for i := 0; i < 16; i++ {
		// Shard suffixes are decimal (web_status_0 .. web_status_15) to match
		// AutoSyncPagesToStatus, which builds names with strconv.Itoa. The
		// previous "%x" verb produced hex names ("web_status_a" for shard 10),
		// silently querying tables that do not exist.
		tableName := fmt.Sprintf("web_status_%d", i)

		// Redis key holding the max id of this shard already copied to Redis.
		statusMaxIdKey := fmt.Sprintf("base_spider_%s_max_id", tableName)

		maxId, _ := redis.Get(config.Ctx, statusMaxIdKey).Int()
		if maxId == 0 {
			// Cursor key missing or expired: fall back to the id of the
			// leftmost (most recently pushed) entry of the pending list, so we
			// do not re-enqueue rows that are already queued.
			tmpStatus, err := redis.LRange(config.Ctx, needCrawList, 0, 0).Result()
			if err == nil && len(tmpStatus) > 0 {
				var leftStatusData models.Status
				if err := json.Unmarshal([]byte(tmpStatus[0]), &leftStatusData); err == nil {
					maxId = leftStatusData.Id
				}
			}
		}

		// Prefer the blacklist set in Redis; fall back to the in-memory map
		// when the set is empty or the read fails.
		domainBlackListRedis, _ := redis.SMembers(config.Ctx, domainBlackKey).Result()
		if len(domainBlackListRedis) == 0 {
			for domain := range Domain_black_list {
				domainBlackListRedis = append(domainBlackListRedis, domain)
			}
		}

		var statusNeedCopyData []models.Status
		query := mysql.Table(tableName).
			Where("craw_done = ?", 0).
			Where("id > ?", maxId)
		if len(domainBlackListRedis) > 0 {
			query = query.Where("host NOT IN (?)", domainBlackListRedis)
		}
		if err := query.Order("id").Limit(pageNum).Find(&statusNeedCopyData).Error; err != nil {
			// Skip this shard on query failure; the cursor stays put so the
			// rows are retried next run.
			fmt.Printf("从 %s 取数据失败: %v\n", tableName, err)
			continue
		}

		if len(statusNeedCopyData) > 0 {
			for _, status := range statusNeedCopyData {
				payload, err := json.Marshal(status)
				if err != nil {
					continue // skip rows that cannot be serialized
				}
				redis.LPush(config.Ctx, needCrawList, payload)
			}
			// Advance the cursor to the last (highest) id pushed; the TTL lets
			// a stale cursor self-heal via the LRange fallback above.
			redis.Set(config.Ctx, statusMaxIdKey, statusNeedCopyData[len(statusNeedCopyData)-1].Id, time.Hour)
		}
	}
}
