package service

import (
	"context"
	"errors"
	"fmt"
	"log"
	"net/url"
	"strings"
	"sync"
	"time"

	"ollama-go/dto"
	"ollama-go/entity"
	"ollama-go/query"
	"ollama-go/vo"

	"github.com/PuerkitoBio/goquery"
	"github.com/androidsr/sc-go/model"
	"github.com/androidsr/sc-go/sbuilder"
	"github.com/androidsr/sc-go/sc"
	"github.com/androidsr/sc-go/sgorm"
	"github.com/androidsr/sc-go/sno"
	"github.com/chromedp/cdproto/network"
	"github.com/chromedp/chromedp"
)

var (
	crawlerSelecterService *CrawlerSelecterService
	crawlerSelecterOnce    sync.Once
)

// CrawlerSelecterService provides CRUD operations for crawler selecter records.
type CrawlerSelecterService struct {
}

// NewCrawlerSelecterService returns the process-wide singleton instance.
// The previous bare nil-check was a data race when called from multiple
// goroutines; sync.Once makes the lazy initialization safe.
func NewCrawlerSelecterService() *CrawlerSelecterService {
	crawlerSelecterOnce.Do(func() {
		crawlerSelecterService = new(CrawlerSelecterService)
	})
	return crawlerSelecterService
}

// 使用id获取数据
func (m CrawlerSelecterService) Get(id string) *vo.CrawlerSelecterVO {
	data := new(entity.CrawlerSelecter)
	data.Id = id
	err := sgorm.DB.GetOne(data)
	if err != nil {
		return nil
	}
	return sc.Copy[vo.CrawlerSelecterVO](data)
}

// 使用name获取数据
func (m CrawlerSelecterService) FindByName(name string) (*vo.CrawlerSelecterVO, error) {
	data := new(entity.CrawlerSelecter)
	data.Name = name
	err := sgorm.DB.GetOne(data)
	if err != nil {
		fmt.Println(err)
		return nil, err
	}
	return sc.Copy[vo.CrawlerSelecterVO](data), nil
}

// 保存数据
func (m CrawlerSelecterService) Save(dto *dto.CrawlerSelecterDTO) error {
	data := sc.Copy[entity.CrawlerSelecter](dto)
	data.Id = sno.GetString()
	err := sgorm.DB.Insert(data)
	return err
}

// 更新数据
func (m CrawlerSelecterService) UpdateById(dto *dto.CrawlerSelecterDTO) error {
	dbEntity := new(entity.CrawlerSelecter)
	dbEntity.Id = dto.Id
	sgorm.DB.GetOne(dbEntity)
	if dbEntity.TenantId != "" && dbEntity.TenantId != dto.TenantId {
		return errors.New("非本人创建不能修改")
	}
	sc.CopyTo(dto, dbEntity)
	err := sgorm.DB.UpdateById(dbEntity).Error
	return err
}

// 删除数据
func (m CrawlerSelecterService) DeleteById(id string) error {
	err := sgorm.DB.DeleteById(entity.CrawlerSelecter{}, id)
	return err
}

// 分页列表查询
func (m CrawlerSelecterService) QueryPage(query *query.CrawlerSelecterQueryDTO) *model.PageResult {
	sql := `select * from crawler_selecter a `
	data := make([]entity.CrawlerSelecter, 0)
	b := sbuilder.StructToBuilder(query, sql)
	sql, values := b.Build()
	return sgorm.DB.SelectPage(&data, query.BaseQueryDTO.Page, sql, values...)
}

// 分页下拉查询
func (m CrawlerSelecterService) QueryList(query *model.SelectQueryDTO) *model.PageResult {
	sql := `select a.id as value, a.name as label from crawler_selecter a where 1=1 `
	data := make([]model.SelectVO, 0)
	b := sbuilder.Builder(sql)
	b.Like("CONCAT(a.id,a.domain_name)", query.Label)
	for k, v := range query.Vars {
		b.Eq(k, v)
	}
	b.Multiple().Ors(b.In("a.id", query.Selected))
	sql, values := b.Build()
	return sgorm.DB.SelectPage(&data, &query.Page, sql, values...)
}

// GetDomainInfo finds the selecter configuration matching the host of
// urlPath and the given type. Returns nil when no row matches.
//
// Fix 1: url.Parse succeeds for scheme-less input like "example.com",
// yielding an empty Hostname(), which previously produced a `like '%%'`
// filter matching every row; fall back to the raw input in that case.
// Fix 2: pass the *entity.CrawlerSelecter directly to First instead of a
// pointer-to-pointer.
func (m CrawlerSelecterService) GetDomainInfo(urlPath string, typ string) *entity.CrawlerSelecter {
	hostname := urlPath
	if u, err := url.Parse(urlPath); err == nil && u.Hostname() != "" {
		hostname = u.Hostname()
	}
	data := &entity.CrawlerSelecter{}
	err := sgorm.DB.Where("domain_url like ? and type = ?", `%`+hostname+`%`, typ).First(data).Error
	if err != nil {
		return nil
	}
	return data
}

// Chromedp wraps a headless Chrome allocator and browser context for
// page crawling.
type Chromedp struct {
	ctx         context.Context
	cancel      context.CancelFunc
	allocCancel context.CancelFunc // releases the exec allocator; previously leaked
}

// NewChromedp creates a headless Chrome allocator and a browser context.
// Fix: the allocator's cancel func returned by NewExecAllocator was
// discarded, so the allocator (and its browser process) was never
// released; it is now stored and invoked by Close.
func NewChromedp() *Chromedp {
	m := new(Chromedp)
	options := []chromedp.ExecAllocatorOption{
		// set to false to show the browser window (headless by default)
		chromedp.Flag("headless", true),
		chromedp.Flag("hide-scrollbars", false),
		chromedp.Flag("mute-audio", false),
		// skip image loading to speed up page loads
		chromedp.Flag("blink-settings", "imagesEnabled=false"),
		chromedp.UserAgent(`Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36`),
	}
	options = append(chromedp.DefaultExecAllocatorOptions[:], options...)

	allocCtx, allocCancel := chromedp.NewExecAllocator(context.Background(), options...)
	m.allocCancel = allocCancel
	m.ctx, m.cancel = chromedp.NewContext(allocCtx)
	return m
}

// Close tears down the browser context and then the underlying allocator.
func (m *Chromedp) Close() error {
	m.cancel()
	if m.allocCancel != nil {
		m.allocCancel()
	}
	return nil
}

// 获取网页元素
func (m *Chromedp) GetHtml(url string) string {
	ctx, cancel := context.WithTimeout(m.ctx, 15*time.Second)
	defer cancel()
	var htmlContent string
	err := chromedp.Run(
		ctx,
		chromedp.Tasks{
			network.Enable(),
			chromedp.Navigate(url),
			chromedp.WaitVisible(`body`, chromedp.ByQuery), // 等待页面主体加载完成
			chromedp.Sleep(5 * time.Second),                // 再等待 5 秒
			chromedp.OuterHTML(`html`, &htmlContent),       // 获取页面的完整 HTML
		},
	)
	if err != nil {
		return ""
	}
	return htmlContent
}

// 获取网页中的文本内容
func (m *Chromedp) GetContent(url string, selecter string) (string, error) {
	ctx, cancel := context.WithTimeout(m.ctx, 15*time.Second)
	defer cancel()
	var text string
	if selecter == "" {
		selecter = "body"
	}
	err := chromedp.Run(ctx,
		network.Enable(),
		chromedp.Navigate(url),
		chromedp.WaitVisible(selecter, chromedp.ByQuery), // 等待页面主体加载完成
		chromedp.Sleep(5*time.Second),                    // 再等待 5 秒
		chromedp.Text(selecter, &text, chromedp.NodeVisible, chromedp.ByQuery),
	)
	if err != nil {
		return "", err
	}
	return text, nil
}

// 从HTML文本中提取连接
func (m *Chromedp) GetLink(url string, selecter string) []string {
	ctx, _ := context.WithTimeout(m.ctx, 15*time.Second)
	var htmlContent string
	if selecter == "" {
		selecter = "body"
	}
	tasks := chromedp.Tasks{
		network.Enable(),
		chromedp.Navigate(url),
		chromedp.WaitVisible(`body`, chromedp.ByQuery), // 等待页面主体加载完成
		chromedp.Sleep(5 * time.Second),                // 再等待 5 秒
		chromedp.OuterHTML(`html`, &htmlContent),       // 获取页面的完整 HTML
	}
	err := chromedp.Run(
		ctx,
		tasks,
	)
	if err != nil {
		log.Printf("获取网页链接失败：%s", err.Error())
		return nil
	}
	return m.GetLinkByContent(htmlContent, selecter)
}

// GetContentBySelecter parses htmlContent and returns the text of every
// node matching selecter. Returns nil when the HTML cannot be parsed.
// Fix: the failure log message said "提取网页链接地址" (copy-pasted from the
// link extractor) although this method extracts text, not links.
func (m *Chromedp) GetContentBySelecter(htmlContent string, selecter string) []string {
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(htmlContent))
	if err != nil {
		log.Printf("提取网页文本内容失败：%s", err.Error())
		return nil
	}
	texts := make([]string, 0)
	doc.Find(selecter).Each(func(i int, item *goquery.Selection) {
		texts = append(texts, item.Text())
	})
	return texts
}

// GetLinkByContent parses htmlContent and collects href attribute values of
// nodes matching selecter that are absolute http(s) URLs.
// Fix: strings.Contains accepted any href merely embedding "http://"
// somewhere (e.g. a redirect query parameter or javascript: URL);
// strings.HasPrefix checks the actual URL scheme.
func (m *Chromedp) GetLinkByContent(htmlContent string, selecter string) []string {
	links := make([]string, 0)
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(htmlContent))
	if err != nil {
		log.Printf("提取网页链接地址：%s", err.Error())
		return links
	}
	doc.Find(selecter).Each(func(i int, s *goquery.Selection) {
		link, exists := s.Attr("href")
		if exists && (strings.HasPrefix(link, "http://") || strings.HasPrefix(link, "https://")) {
			links = append(links, link)
		}
	})
	return links
}
